From fd1109d6597278e2af9aaefb46b05ce1a1105d18 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 13 Sep 2023 12:28:49 +0000 Subject: [PATCH] deploy: 1b4bad3dd37e0fcad599100d3d6011e7126ee581 --- .nojekyll | 0 404.html | 32 + CNAME | 1 + assets/css/styles.afdeec1f.css | 1 + ...Bold-8ca10ba7a8f4dfc517918497d4738352.woff | Bin 0 -> 45248 bytes ...Light-333da16a3f3cc391d0876c6d773efc6f.ttf | Bin 0 -> 167000 bytes ...gular-fc2b5060f7accec5cf74437196c1b027.ttf | Bin 0 -> 168260 bytes ...gular-34e46962590bff8eefe5f14af6ea24e3.ttf | Bin 0 -> 86908 bytes ...edium-115acab02aed19275f712214686d778e.ttf | Bin 0 -> 116056 bytes assets/js/002d14fa.b553e184.js | 1 + assets/js/0030fd86.cda43b2b.js | 1 + assets/js/036db789.b29b3dc1.js | 1 + assets/js/04d4af82.f9ae366c.js | 1 + assets/js/0582779b.aec163df.js | 1 + assets/js/060147ec.e48d0a0e.js | 1 + assets/js/065bbf18.851ffe9a.js | 1 + assets/js/06acf88d.54a41cbf.js | 1 + assets/js/09cca5f2.a6e2df67.js | 1 + assets/js/0a79db1f.58fed57a.js | 1 + assets/js/0d766b78.42ab4cf9.js | 1 + assets/js/0d927e9a.ee128e68.js | 1 + assets/js/0fb5d45b.a1df13f6.js | 1 + assets/js/0ff0556c.0b867a3c.js | 1 + assets/js/1030.7d064482.js | 1 + assets/js/10df9fdc.6033a07f.js | 1 + assets/js/111ae602.55a30aac.js | 1 + assets/js/1128ab4d.708c7b33.js | 1 + assets/js/1187a271.6b0af2cc.js | 1 + assets/js/11c86bb5.38ea69ae.js | 1 + assets/js/1244450e.79248efa.js | 1 + assets/js/13bdfbad.a7bd1580.js | 1 + assets/js/14056c2c.8964dc73.js | 1 + assets/js/14111b0c.d1701b86.js | 1 + assets/js/14f7f42b.27993dc9.js | 1 + assets/js/15aa5f44.44966933.js | 1 + assets/js/15f1310d.015cd27f.js | 1 + assets/js/1674a630.97d72d03.js | 1 + assets/js/16e87abe.02193372.js | 1 + assets/js/17896441.8961bab4.js | 1 + assets/js/1957b43a.daf8773c.js | 1 + assets/js/196c63a7.f818c35d.js | 1 + assets/js/1a4e3797.a52196f1.js | 2 + assets/js/1a4e3797.a52196f1.js.LICENSE.txt | 1 + assets/js/1be78505.228b1716.js | 1 + 
assets/js/1d4d4e46.53235266.js | 1 + assets/js/1efdbea1.365af71b.js | 1 + assets/js/1f0a946f.fcc5a3d2.js | 1 + assets/js/1f1765ab.bbeac427.js | 1 + assets/js/205a719b.fba55762.js | 1 + assets/js/20f8c1fd.f55a6b8d.js | 1 + assets/js/222e7c49.9c8fb23c.js | 1 + assets/js/232ab88c.c37b9f14.js | 1 + assets/js/23c607c1.55560318.js | 1 + assets/js/243cddb9.83dc99e3.js | 1 + assets/js/2622e95a.0b16f714.js | 1 + assets/js/29105828.3c1f0326.js | 1 + assets/js/2ae68e65.2da2affc.js | 1 + assets/js/2afa602b.f892f0f7.js | 1 + assets/js/2b2faa0a.bb2930b4.js | 1 + assets/js/2bc15a09.b35b76c3.js | 1 + assets/js/2c797d78.264327d1.js | 1 + assets/js/2e96a196.47765f9b.js | 1 + assets/js/2fe15297.5d5bccf7.js | 1 + assets/js/3087bb2d.d34d4c8e.js | 1 + assets/js/35d7f647.eeaa1f4e.js | 1 + assets/js/381a15bc.fecfc237.js | 1 + assets/js/38a44003.5bed27ee.js | 1 + assets/js/40415b6c.d29ab3ac.js | 1 + assets/js/409b7aa0.924e3028.js | 1 + assets/js/414d4a37.2900f4a9.js | 1 + assets/js/456c5d82.1356cd36.js | 1 + assets/js/46d2add0.08babb68.js | 1 + assets/js/478692f7.e9e2abf4.js | 1 + assets/js/47ac2e75.3ab08e6a.js | 1 + assets/js/48199270.467a59ac.js | 1 + assets/js/4972.0680bd7d.js | 1 + assets/js/4a00fd3a.497faf2c.js | 1 + assets/js/4a2f1dfa.5bc1686b.js | 1 + assets/js/4a9e4762.9c7fce8d.js | 1 + assets/js/4ace981f.70fbc63c.js | 1 + assets/js/4c4d6ef6.8a27cfcd.js | 1 + assets/js/4d11873e.e7adb799.js | 1 + assets/js/4d517c40.eadb0bc3.js | 1 + assets/js/4e5074e6.b0d53ffd.js | 1 + assets/js/4f8e8160.b9904ca2.js | 1 + assets/js/514a13f6.826a6d2f.js | 1 + assets/js/516ebbd1.b21303ee.js | 1 + assets/js/5300e879.8a8dc6e5.js | 1 + assets/js/5347168a.4d2a41c3.js | 1 + assets/js/5527e5b7.a5884eda.js | 1 + assets/js/5534c352.d3e76e0e.js | 1 + assets/js/5584c47d.98379ebd.js | 1 + assets/js/58b4829f.f9857ac8.js | 1 + assets/js/58f10d9f.49ed8c8f.js | 1 + assets/js/5a11a8c6.882f537d.js | 1 + assets/js/5cf0f698.9f573412.js | 1 + assets/js/61386b8d.6fd0979f.js | 1 + assets/js/62ff7ec9.ec600e31.js | 1 + 
assets/js/647303d6.98160b7c.js | 1 + assets/js/65ab9689.c83f172b.js | 1 + assets/js/6780.b8374b3c.js | 1 + assets/js/68c835af.1e1795fe.js | 1 + assets/js/68d54528.f4b4a695.js | 1 + assets/js/6945.a9a2d87d.js | 1 + assets/js/69a9729f.c03205b8.js | 1 + assets/js/6af17b1d.d647c97a.js | 1 + assets/js/6b76d411.86c99a48.js | 1 + assets/js/6c174e6d.40d06062.js | 1 + assets/js/6c450cd6.73253a7e.js | 1 + assets/js/6cafb666.6d7d0081.js | 1 + assets/js/6d9c0b04.bac51284.js | 1 + assets/js/6dbdf8e8.90843a66.js | 1 + assets/js/6e7b1bc6.ac852278.js | 1 + assets/js/7107eb83.9b621a01.js | 1 + assets/js/7245ce96.2ad33dfd.js | 1 + assets/js/74e1ba0d.3a0419d8.js | 1 + assets/js/75af10bd.f657f121.js | 1 + assets/js/7ae5d564.6ea40e89.js | 1 + assets/js/7b4381d3.483f4f09.js | 1 + assets/js/7b589963.32aefe0f.js | 1 + assets/js/80f42d74.392d78fb.js | 1 + assets/js/81b6783d.a996001b.js | 1 + assets/js/81bf77fc.e86cf07e.js | 1 + assets/js/83ec613f.6af4e02b.js | 1 + assets/js/847c12c2.0dcae720.js | 1 + assets/js/87b29f85.960f3d83.js | 1 + assets/js/87f59f37.6b9e11c6.js | 1 + assets/js/8804eadc.73cda2d1.js | 1 + assets/js/8894.674c4c01.js | 1 + assets/js/898ba646.9c58f3c1.js | 1 + assets/js/8ad68633.50914cab.js | 1 + assets/js/8c27608b.c4322928.js | 1 + assets/js/8d193b98.faf1a7fe.js | 1 + assets/js/8ff5d7ba.c28181ba.js | 1 + assets/js/935f2afb.c2b93cd4.js | 1 + assets/js/9440fd12.93ff0ee3.js | 1 + assets/js/94d2eef0.b68cf51d.js | 1 + assets/js/97a352ae.33fe6fad.js | 1 + assets/js/980c25d7.6573b859.js | 2 + assets/js/980c25d7.6573b859.js.LICENSE.txt | 4 + assets/js/982d0b04.f756f754.js | 1 + assets/js/9980ea0e.b9be0f2e.js | 1 + assets/js/99912bf6.1a9c1ee9.js | 1 + assets/js/99bfca7e.4ea0922d.js | 1 + assets/js/99d969f2.d89c6b4c.js | 1 + assets/js/9af63d42.bb0edfcf.js | 1 + assets/js/9defa5b7.dbb12a65.js | 1 + assets/js/9fc8d1d9.4bcec99f.js | 1 + assets/js/9fda8563.2e6d9fba.js | 1 + assets/js/a03cde8f.cdce6d67.js | 1 + assets/js/a07fb1cb.ef0f7575.js | 1 + assets/js/a17dbf83.57177a4c.js | 1 + 
assets/js/a34ed3b2.a8b3a7f3.js | 1 + assets/js/a4055066.ba2a7e48.js | 1 + assets/js/a4cbee7f.d067a9ae.js | 1 + assets/js/a5b090b0.a4364faf.js | 1 + assets/js/a624bde7.bd5c926b.js | 1 + assets/js/a686ca68.cc915e73.js | 1 + assets/js/a6c229c0.135ade25.js | 1 + assets/js/a7914a5c.64fb6d59.js | 1 + assets/js/a80d168f.2854157b.js | 1 + assets/js/a9ab9f8f.bdfd8b0f.js | 1 + assets/js/aa946361.9a9d22e0.js | 1 + assets/js/aacd1d40.c4e40d4c.js | 1 + assets/js/ac02e102.06e01491.js | 1 + assets/js/ae1efb81.ef2d54c6.js | 1 + assets/js/b1b6a961.74208c3e.js | 1 + assets/js/b24805c2.5ca38736.js | 1 + assets/js/b638c32b.ab27dab2.js | 1 + assets/js/b70bee8d.45dd4d58.js | 1 + assets/js/b7f60777.7085564d.js | 1 + assets/js/b91921d6.198b04b6.js | 1 + assets/js/b9d0db8e.9fbaa8a9.js | 1 + assets/js/ba3b9f5c.009751df.js | 1 + assets/js/ba9d536d.e1fae3ca.js | 1 + assets/js/be529d37.6621d11d.js | 1 + assets/js/beaba6c2.94675e66.js | 1 + assets/js/bfac6a8d.33d1069c.js | 1 + assets/js/c0e3ff8b.4348bcbf.js | 1 + assets/js/c16f65ec.04e07d23.js | 1 + assets/js/c192c597.2da137e0.js | 1 + assets/js/c248ee7e.56274b20.js | 1 + assets/js/c377a04b.e32687a6.js | 1 + assets/js/c3d488fa.7534f536.js | 1 + assets/js/c4a14462.17d186df.js | 1 + assets/js/c4f5d8e4.b78e8a3e.js | 1 + assets/js/c602cd44.c0889991.js | 1 + assets/js/c95b781b.54e317b5.js | 1 + assets/js/c9eeccbf.fa9d425c.js | 1 + assets/js/ca2bf8a3.b136f6a8.js | 1 + assets/js/ca36df4d.bdcd738d.js | 1 + assets/js/cac45e38.ffd6c350.js | 1 + assets/js/cd19d898.67faad21.js | 1 + assets/js/cd59f9ef.54f4605b.js | 1 + assets/js/d0381ee6.20083f4a.js | 1 + assets/js/d2282d9e.ad5da899.js | 1 + assets/js/d241d4ef.a87ac3a3.js | 1 + assets/js/d2af0b95.61dc7ae0.js | 1 + assets/js/d2b827bd.868ae315.js | 1 + assets/js/d35204c3.f3736ac4.js | 1 + assets/js/d40fb48f.0dd8a669.js | 1 + assets/js/d67a4111.aa568655.js | 1 + assets/js/d73efefc.77589437.js | 1 + assets/js/d7dfec52.b5cc8811.js | 1 + assets/js/d87f7f29.8b85c627.js | 1 + assets/js/d9bd3427.e1f1f230.js | 1 + 
assets/js/d9ce81b2.de1fc440.js | 1 + assets/js/dbc0f590.302c834a.js | 1 + assets/js/dc75700c.84688a9d.js | 1 + assets/js/dde1ff6e.936ed816.js | 1 + assets/js/de2621c2.5348d14a.js | 1 + assets/js/e109b3ff.16e87717.js | 1 + assets/js/e1584d63.e755620a.js | 1 + assets/js/e323208f.514adfe6.js | 1 + assets/js/e333f535.1e236bcc.js | 1 + assets/js/e4d0ad4d.f67d40a9.js | 1 + assets/js/e56c502c.7f764e63.js | 1 + assets/js/e6eb5527.890dde78.js | 1 + assets/js/e7ab2684.124a91a6.js | 1 + assets/js/e8ae88bc.3ee8c934.js | 1 + assets/js/e968e69e.7145b4d3.js | 1 + assets/js/e97b3564.ead957c7.js | 1 + assets/js/ebc40d40.75714ad4.js | 1 + assets/js/ee2e0a62.ea183c41.js | 1 + assets/js/f2954f34.e942d91a.js | 1 + assets/js/f2aaa4e5.df8937bd.js | 1 + assets/js/f35e2aba.396a2a4d.js | 1 + assets/js/f39642a1.dcbef318.js | 1 + assets/js/f7e229b3.1f12b6b5.js | 1 + assets/js/f8edae29.e9acd7b2.js | 1 + assets/js/fb969bb3.8d0af030.js | 1 + assets/js/fc8a86b2.1a0d2a4d.js | 1 + assets/js/fd2e624b.b7df8a31.js | 1 + assets/js/fdc5233c.73a1ac59.js | 1 + assets/js/fe73cc84.50771324.js | 1 + assets/js/fff0a46d.a0ecfadf.js | 1 + assets/js/main.e19c364c.js | 2 + assets/js/main.e19c364c.js.LICENSE.txt | 63 ++ assets/js/runtime~main.bd2548d2.js | 1 + demo/index.html | 32 + docs/0.5.0/CHANGELOG/index.html | 32 + .../0.5.0/api/fastkafka/KafkaEvent/index.html | 32 + .../encoder/avsc_to_pydantic/index.html | 32 + docs/0.5.0/api/fastkafka/index.html | 468 ++++++++++ .../testing/ApacheKafkaBroker/index.html | 32 + .../testing/LocalRedpandaBroker/index.html | 32 + .../api/fastkafka/testing/Tester/index.html | 274 ++++++ docs/0.5.0/cli/fastkafka/index.html | 32 + .../run_fastkafka_server_process/index.html | 32 + .../guides/Guide_00_FastKafka_Demo/index.html | 122 +++ docs/0.5.0/guides/Guide_01_Intro/index.html | 51 ++ .../guides/Guide_02_First_Steps/index.html | 49 + .../guides/Guide_03_Authentication/index.html | 37 + .../index.html | 42 + .../Guide_05_Lifespan_Handler/index.html | 75 ++ .../index.html | 80 ++ 
.../index.html | 150 +++ .../Guide_11_Consumes_Basics/index.html | 61 ++ .../Guide_21_Produces_Basics/index.html | 62 ++ .../guides/Guide_22_Partition_Keys/index.html | 55 ++ .../index.html | 73 ++ .../index.html | 143 +++ docs/0.5.0/index.html | 139 +++ docs/0.6.0/CHANGELOG/index.html | 33 + docs/0.6.0/CONTRIBUTING/index.html | 36 + docs/0.6.0/LICENSE/index.html | 168 ++++ .../api/fastkafka/EventMetadata/index.html | 32 + .../0.6.0/api/fastkafka/KafkaEvent/index.html | 32 + .../api/fastkafka/encoder/AvroBase/index.html | 32 + .../fastkafka/encoder/avro_decoder/index.html | 32 + .../fastkafka/encoder/avro_encoder/index.html | 32 + .../encoder/avsc_to_pydantic/index.html | 32 + .../fastkafka/encoder/json_decoder/index.html | 32 + .../fastkafka/encoder/json_encoder/index.html | 32 + .../executors/DynamicTaskExecutor/index.html | 35 + .../executors/SequentialExecutor/index.html | 35 + docs/0.6.0/api/fastkafka/index.html | 475 ++++++++++ .../testing/ApacheKafkaBroker/index.html | 32 + .../testing/LocalRedpandaBroker/index.html | 32 + .../api/fastkafka/testing/Tester/index.html | 281 ++++++ docs/0.6.0/cli/fastkafka/index.html | 32 + .../run_fastkafka_server_process/index.html | 32 + .../guides/Guide_00_FastKafka_Demo/index.html | 122 +++ docs/0.6.0/guides/Guide_01_Intro/index.html | 51 ++ .../guides/Guide_02_First_Steps/index.html | 49 + .../guides/Guide_03_Authentication/index.html | 37 + .../index.html | 42 + .../Guide_05_Lifespan_Handler/index.html | 75 ++ .../index.html | 80 ++ .../index.html | 150 +++ .../Guide_11_Consumes_Basics/index.html | 88 ++ .../Guide_21_Produces_Basics/index.html | 62 ++ .../guides/Guide_22_Partition_Keys/index.html | 55 ++ .../Guide_23_Batch_Producing/index.html | 55 ++ .../index.html | 73 ++ .../index.html | 143 +++ docs/0.6.0/index.html | 139 +++ docs/0.7.0/CHANGELOG/index.html | 33 + docs/0.7.0/CONTRIBUTING/index.html | 36 + docs/0.7.0/LICENSE/index.html | 168 ++++ .../api/fastkafka/EventMetadata/index.html | 32 + 
.../0.7.0/api/fastkafka/KafkaEvent/index.html | 32 + .../api/fastkafka/encoder/AvroBase/index.html | 32 + .../fastkafka/encoder/avro_decoder/index.html | 32 + .../fastkafka/encoder/avro_encoder/index.html | 32 + .../encoder/avsc_to_pydantic/index.html | 32 + .../fastkafka/encoder/json_decoder/index.html | 32 + .../fastkafka/encoder/json_encoder/index.html | 32 + .../executors/DynamicTaskExecutor/index.html | 35 + .../executors/SequentialExecutor/index.html | 35 + docs/0.7.0/api/fastkafka/index.html | 496 ++++++++++ .../testing/ApacheKafkaBroker/index.html | 32 + .../testing/LocalRedpandaBroker/index.html | 32 + .../api/fastkafka/testing/Tester/index.html | 289 ++++++ docs/0.7.0/cli/fastkafka/index.html | 32 + .../run_fastkafka_server_process/index.html | 32 + .../guides/Guide_00_FastKafka_Demo/index.html | 117 +++ docs/0.7.0/guides/Guide_01_Intro/index.html | 51 ++ .../guides/Guide_02_First_Steps/index.html | 49 + .../guides/Guide_03_Authentication/index.html | 37 + .../index.html | 37 + .../Guide_05_Lifespan_Handler/index.html | 75 ++ .../index.html | 67 ++ .../index.html | 137 +++ .../Guide_11_Consumes_Basics/index.html | 87 ++ .../Guide_12_Batch_Consuming/index.html | 47 + .../Guide_21_Produces_Basics/index.html | 62 ++ .../guides/Guide_22_Partition_Keys/index.html | 53 ++ .../Guide_23_Batch_Producing/index.html | 52 ++ .../index.html | 155 ++++ .../index.html | 69 ++ .../index.html | 134 +++ .../index.html | 68 ++ docs/0.7.0/index.html | 118 +++ docs/0.7.1/CHANGELOG/index.html | 33 + docs/0.7.1/CONTRIBUTING/index.html | 36 + docs/0.7.1/LICENSE/index.html | 168 ++++ .../api/fastkafka/EventMetadata/index.html | 32 + .../0.7.1/api/fastkafka/KafkaEvent/index.html | 32 + .../api/fastkafka/encoder/AvroBase/index.html | 32 + .../fastkafka/encoder/avro_decoder/index.html | 32 + .../fastkafka/encoder/avro_encoder/index.html | 32 + .../encoder/avsc_to_pydantic/index.html | 32 + .../fastkafka/encoder/json_decoder/index.html | 32 + 
.../fastkafka/encoder/json_encoder/index.html | 32 + .../executors/DynamicTaskExecutor/index.html | 35 + .../executors/SequentialExecutor/index.html | 35 + docs/0.7.1/api/fastkafka/index.html | 497 ++++++++++ .../testing/ApacheKafkaBroker/index.html | 32 + .../testing/LocalRedpandaBroker/index.html | 32 + .../api/fastkafka/testing/Tester/index.html | 290 ++++++ docs/0.7.1/cli/fastkafka/index.html | 32 + .../run_fastkafka_server_process/index.html | 32 + .../guides/Guide_00_FastKafka_Demo/index.html | 122 +++ docs/0.7.1/guides/Guide_01_Intro/index.html | 51 ++ .../guides/Guide_02_First_Steps/index.html | 49 + .../guides/Guide_03_Authentication/index.html | 37 + .../index.html | 42 + .../Guide_05_Lifespan_Handler/index.html | 75 ++ .../index.html | 80 ++ .../index.html | 150 +++ .../Guide_11_Consumes_Basics/index.html | 88 ++ .../Guide_12_Batch_Consuming/index.html | 47 + .../Guide_21_Produces_Basics/index.html | 62 ++ .../guides/Guide_22_Partition_Keys/index.html | 55 ++ .../Guide_23_Batch_Producing/index.html | 55 ++ .../index.html | 155 ++++ .../index.html | 73 ++ .../index.html | 143 +++ .../index.html | 78 ++ docs/0.7.1/index.html | 121 +++ docs/CHANGELOG/index.html | 33 + docs/CONTRIBUTING/index.html | 36 + docs/LICENSE/index.html | 168 ++++ docs/api/fastkafka/EventMetadata/index.html | 32 + docs/api/fastkafka/KafkaEvent/index.html | 32 + .../api/fastkafka/encoder/AvroBase/index.html | 38 + .../fastkafka/encoder/avro_decoder/index.html | 32 + .../fastkafka/encoder/avro_encoder/index.html | 32 + .../encoder/avsc_to_pydantic/index.html | 32 + .../fastkafka/encoder/json_decoder/index.html | 32 + .../fastkafka/encoder/json_encoder/index.html | 32 + .../executors/DynamicTaskExecutor/index.html | 33 + .../executors/SequentialExecutor/index.html | 33 + docs/api/fastkafka/index.html | 39 + .../testing/ApacheKafkaBroker/index.html | 34 + .../testing/LocalRedpandaBroker/index.html | 34 + docs/api/fastkafka/testing/Tester/index.html | 39 + docs/cli/fastkafka/index.html | 
32 + .../run_fastkafka_server_process/index.html | 32 + .../guides/Guide_00_FastKafka_Demo/index.html | 122 +++ docs/guides/Guide_01_Intro/index.html | 51 ++ docs/guides/Guide_02_First_Steps/index.html | 49 + .../guides/Guide_03_Authentication/index.html | 37 + .../index.html | 42 + .../Guide_05_Lifespan_Handler/index.html | 75 ++ .../index.html | 80 ++ .../index.html | 150 +++ .../Guide_11_Consumes_Basics/index.html | 90 ++ .../Guide_12_Batch_Consuming/index.html | 47 + .../Guide_21_Produces_Basics/index.html | 62 ++ .../guides/Guide_22_Partition_Keys/index.html | 55 ++ .../Guide_23_Batch_Producing/index.html | 55 ++ .../index.html | 155 ++++ .../index.html | 73 ++ .../index.html | 143 +++ .../index.html | 78 ++ docs/index.html | 121 +++ docs/next/CHANGELOG/index.html | 33 + docs/next/CONTRIBUTING/index.html | 36 + docs/next/LICENSE/index.html | 168 ++++ .../api/fastkafka/EventMetadata/index.html | 32 + docs/next/api/fastkafka/KafkaEvent/index.html | 32 + .../api/fastkafka/encoder/AvroBase/index.html | 38 + .../fastkafka/encoder/avro_decoder/index.html | 32 + .../fastkafka/encoder/avro_encoder/index.html | 32 + .../encoder/avsc_to_pydantic/index.html | 32 + .../fastkafka/encoder/json_decoder/index.html | 32 + .../fastkafka/encoder/json_encoder/index.html | 32 + .../executors/DynamicTaskExecutor/index.html | 33 + .../executors/SequentialExecutor/index.html | 33 + docs/next/api/fastkafka/index.html | 39 + .../testing/ApacheKafkaBroker/index.html | 34 + .../testing/LocalRedpandaBroker/index.html | 34 + .../api/fastkafka/testing/Tester/index.html | 39 + docs/next/cli/fastkafka/index.html | 32 + .../run_fastkafka_server_process/index.html | 32 + .../guides/Guide_00_FastKafka_Demo/index.html | 122 +++ docs/next/guides/Guide_01_Intro/index.html | 51 ++ .../guides/Guide_02_First_Steps/index.html | 49 + .../guides/Guide_03_Authentication/index.html | 37 + .../index.html | 42 + .../Guide_05_Lifespan_Handler/index.html | 75 ++ .../index.html | 80 ++ .../index.html | 150 +++ 
.../Guide_11_Consumes_Basics/index.html | 90 ++ .../Guide_12_Batch_Consuming/index.html | 47 + .../Guide_21_Produces_Basics/index.html | 62 ++ .../guides/Guide_22_Partition_Keys/index.html | 55 ++ .../Guide_23_Batch_Producing/index.html | 55 ++ .../index.html | 155 ++++ .../index.html | 73 ++ .../index.html | 143 +++ .../index.html | 78 ++ .../index.html | 72 ++ docs/next/index.html | 121 +++ font/Panton-SemiBold.woff | Bin 0 -> 45248 bytes font/Roboto-Light.ttf | Bin 0 -> 167000 bytes font/Roboto-Regular.ttf | Bin 0 -> 168260 bytes font/RobotoMono-Regular.ttf | Bin 0 -> 86908 bytes font/Rubik-Medium.ttf | Bin 0 -> 116056 bytes img/AIRT_icon_blue.svg | 11 + img/I.svg | 1 + img/Y_Combinator_Logo.png | Bin 0 -> 10270 bytes img/a-alphabet-round-icon.png | Bin 0 -> 4191 bytes img/compass-outline.png | Bin 0 -> 1085 bytes img/home-icon.svg | 20 + img/icon-arrow-right-blue.svg | 9 + img/icon-discord.svg | 17 + img/icon-facebook.svg | 13 + img/icon-github.svg | 16 + img/icon-linkedin.svg | 19 + img/icon-twitter.svg | 16 + img/p-alphabet-round-icon.png | Bin 0 -> 3753 bytes img/prototype.svg | 148 +++ img/reddit-logo.png | Bin 0 -> 4638 bytes img/robot-footer.svg | 76 ++ img/robot-hero.svg | 862 ++++++++++++++++++ img/streamline.svg | 75 ++ img/twitter-logo.svg | 6 + img/undraw_docusaurus_mountain.svg | 171 ++++ img/undraw_docusaurus_react.svg | 170 ++++ img/undraw_docusaurus_tree.svg | 40 + img/write.svg | 113 +++ index.html | 32 + opensearch.xml | 11 + search/index.html | 32 + sitemap.xml | 1 + 478 files changed, 17440 insertions(+) create mode 100644 .nojekyll create mode 100644 404.html create mode 100644 CNAME create mode 100644 assets/css/styles.afdeec1f.css create mode 100644 assets/fonts/Panton-SemiBold-8ca10ba7a8f4dfc517918497d4738352.woff create mode 100644 assets/fonts/Roboto-Light-333da16a3f3cc391d0876c6d773efc6f.ttf create mode 100644 assets/fonts/Roboto-Regular-fc2b5060f7accec5cf74437196c1b027.ttf create mode 100644 
assets/fonts/RobotoMono-Regular-34e46962590bff8eefe5f14af6ea24e3.ttf create mode 100644 assets/fonts/Rubik-Medium-115acab02aed19275f712214686d778e.ttf create mode 100644 assets/js/002d14fa.b553e184.js create mode 100644 assets/js/0030fd86.cda43b2b.js create mode 100644 assets/js/036db789.b29b3dc1.js create mode 100644 assets/js/04d4af82.f9ae366c.js create mode 100644 assets/js/0582779b.aec163df.js create mode 100644 assets/js/060147ec.e48d0a0e.js create mode 100644 assets/js/065bbf18.851ffe9a.js create mode 100644 assets/js/06acf88d.54a41cbf.js create mode 100644 assets/js/09cca5f2.a6e2df67.js create mode 100644 assets/js/0a79db1f.58fed57a.js create mode 100644 assets/js/0d766b78.42ab4cf9.js create mode 100644 assets/js/0d927e9a.ee128e68.js create mode 100644 assets/js/0fb5d45b.a1df13f6.js create mode 100644 assets/js/0ff0556c.0b867a3c.js create mode 100644 assets/js/1030.7d064482.js create mode 100644 assets/js/10df9fdc.6033a07f.js create mode 100644 assets/js/111ae602.55a30aac.js create mode 100644 assets/js/1128ab4d.708c7b33.js create mode 100644 assets/js/1187a271.6b0af2cc.js create mode 100644 assets/js/11c86bb5.38ea69ae.js create mode 100644 assets/js/1244450e.79248efa.js create mode 100644 assets/js/13bdfbad.a7bd1580.js create mode 100644 assets/js/14056c2c.8964dc73.js create mode 100644 assets/js/14111b0c.d1701b86.js create mode 100644 assets/js/14f7f42b.27993dc9.js create mode 100644 assets/js/15aa5f44.44966933.js create mode 100644 assets/js/15f1310d.015cd27f.js create mode 100644 assets/js/1674a630.97d72d03.js create mode 100644 assets/js/16e87abe.02193372.js create mode 100644 assets/js/17896441.8961bab4.js create mode 100644 assets/js/1957b43a.daf8773c.js create mode 100644 assets/js/196c63a7.f818c35d.js create mode 100644 assets/js/1a4e3797.a52196f1.js create mode 100644 assets/js/1a4e3797.a52196f1.js.LICENSE.txt create mode 100644 assets/js/1be78505.228b1716.js create mode 100644 assets/js/1d4d4e46.53235266.js create mode 100644 
assets/js/1efdbea1.365af71b.js create mode 100644 assets/js/1f0a946f.fcc5a3d2.js create mode 100644 assets/js/1f1765ab.bbeac427.js create mode 100644 assets/js/205a719b.fba55762.js create mode 100644 assets/js/20f8c1fd.f55a6b8d.js create mode 100644 assets/js/222e7c49.9c8fb23c.js create mode 100644 assets/js/232ab88c.c37b9f14.js create mode 100644 assets/js/23c607c1.55560318.js create mode 100644 assets/js/243cddb9.83dc99e3.js create mode 100644 assets/js/2622e95a.0b16f714.js create mode 100644 assets/js/29105828.3c1f0326.js create mode 100644 assets/js/2ae68e65.2da2affc.js create mode 100644 assets/js/2afa602b.f892f0f7.js create mode 100644 assets/js/2b2faa0a.bb2930b4.js create mode 100644 assets/js/2bc15a09.b35b76c3.js create mode 100644 assets/js/2c797d78.264327d1.js create mode 100644 assets/js/2e96a196.47765f9b.js create mode 100644 assets/js/2fe15297.5d5bccf7.js create mode 100644 assets/js/3087bb2d.d34d4c8e.js create mode 100644 assets/js/35d7f647.eeaa1f4e.js create mode 100644 assets/js/381a15bc.fecfc237.js create mode 100644 assets/js/38a44003.5bed27ee.js create mode 100644 assets/js/40415b6c.d29ab3ac.js create mode 100644 assets/js/409b7aa0.924e3028.js create mode 100644 assets/js/414d4a37.2900f4a9.js create mode 100644 assets/js/456c5d82.1356cd36.js create mode 100644 assets/js/46d2add0.08babb68.js create mode 100644 assets/js/478692f7.e9e2abf4.js create mode 100644 assets/js/47ac2e75.3ab08e6a.js create mode 100644 assets/js/48199270.467a59ac.js create mode 100644 assets/js/4972.0680bd7d.js create mode 100644 assets/js/4a00fd3a.497faf2c.js create mode 100644 assets/js/4a2f1dfa.5bc1686b.js create mode 100644 assets/js/4a9e4762.9c7fce8d.js create mode 100644 assets/js/4ace981f.70fbc63c.js create mode 100644 assets/js/4c4d6ef6.8a27cfcd.js create mode 100644 assets/js/4d11873e.e7adb799.js create mode 100644 assets/js/4d517c40.eadb0bc3.js create mode 100644 assets/js/4e5074e6.b0d53ffd.js create mode 100644 assets/js/4f8e8160.b9904ca2.js create mode 100644 
assets/js/514a13f6.826a6d2f.js create mode 100644 assets/js/516ebbd1.b21303ee.js create mode 100644 assets/js/5300e879.8a8dc6e5.js create mode 100644 assets/js/5347168a.4d2a41c3.js create mode 100644 assets/js/5527e5b7.a5884eda.js create mode 100644 assets/js/5534c352.d3e76e0e.js create mode 100644 assets/js/5584c47d.98379ebd.js create mode 100644 assets/js/58b4829f.f9857ac8.js create mode 100644 assets/js/58f10d9f.49ed8c8f.js create mode 100644 assets/js/5a11a8c6.882f537d.js create mode 100644 assets/js/5cf0f698.9f573412.js create mode 100644 assets/js/61386b8d.6fd0979f.js create mode 100644 assets/js/62ff7ec9.ec600e31.js create mode 100644 assets/js/647303d6.98160b7c.js create mode 100644 assets/js/65ab9689.c83f172b.js create mode 100644 assets/js/6780.b8374b3c.js create mode 100644 assets/js/68c835af.1e1795fe.js create mode 100644 assets/js/68d54528.f4b4a695.js create mode 100644 assets/js/6945.a9a2d87d.js create mode 100644 assets/js/69a9729f.c03205b8.js create mode 100644 assets/js/6af17b1d.d647c97a.js create mode 100644 assets/js/6b76d411.86c99a48.js create mode 100644 assets/js/6c174e6d.40d06062.js create mode 100644 assets/js/6c450cd6.73253a7e.js create mode 100644 assets/js/6cafb666.6d7d0081.js create mode 100644 assets/js/6d9c0b04.bac51284.js create mode 100644 assets/js/6dbdf8e8.90843a66.js create mode 100644 assets/js/6e7b1bc6.ac852278.js create mode 100644 assets/js/7107eb83.9b621a01.js create mode 100644 assets/js/7245ce96.2ad33dfd.js create mode 100644 assets/js/74e1ba0d.3a0419d8.js create mode 100644 assets/js/75af10bd.f657f121.js create mode 100644 assets/js/7ae5d564.6ea40e89.js create mode 100644 assets/js/7b4381d3.483f4f09.js create mode 100644 assets/js/7b589963.32aefe0f.js create mode 100644 assets/js/80f42d74.392d78fb.js create mode 100644 assets/js/81b6783d.a996001b.js create mode 100644 assets/js/81bf77fc.e86cf07e.js create mode 100644 assets/js/83ec613f.6af4e02b.js create mode 100644 assets/js/847c12c2.0dcae720.js create mode 100644 
assets/js/87b29f85.960f3d83.js create mode 100644 assets/js/87f59f37.6b9e11c6.js create mode 100644 assets/js/8804eadc.73cda2d1.js create mode 100644 assets/js/8894.674c4c01.js create mode 100644 assets/js/898ba646.9c58f3c1.js create mode 100644 assets/js/8ad68633.50914cab.js create mode 100644 assets/js/8c27608b.c4322928.js create mode 100644 assets/js/8d193b98.faf1a7fe.js create mode 100644 assets/js/8ff5d7ba.c28181ba.js create mode 100644 assets/js/935f2afb.c2b93cd4.js create mode 100644 assets/js/9440fd12.93ff0ee3.js create mode 100644 assets/js/94d2eef0.b68cf51d.js create mode 100644 assets/js/97a352ae.33fe6fad.js create mode 100644 assets/js/980c25d7.6573b859.js create mode 100644 assets/js/980c25d7.6573b859.js.LICENSE.txt create mode 100644 assets/js/982d0b04.f756f754.js create mode 100644 assets/js/9980ea0e.b9be0f2e.js create mode 100644 assets/js/99912bf6.1a9c1ee9.js create mode 100644 assets/js/99bfca7e.4ea0922d.js create mode 100644 assets/js/99d969f2.d89c6b4c.js create mode 100644 assets/js/9af63d42.bb0edfcf.js create mode 100644 assets/js/9defa5b7.dbb12a65.js create mode 100644 assets/js/9fc8d1d9.4bcec99f.js create mode 100644 assets/js/9fda8563.2e6d9fba.js create mode 100644 assets/js/a03cde8f.cdce6d67.js create mode 100644 assets/js/a07fb1cb.ef0f7575.js create mode 100644 assets/js/a17dbf83.57177a4c.js create mode 100644 assets/js/a34ed3b2.a8b3a7f3.js create mode 100644 assets/js/a4055066.ba2a7e48.js create mode 100644 assets/js/a4cbee7f.d067a9ae.js create mode 100644 assets/js/a5b090b0.a4364faf.js create mode 100644 assets/js/a624bde7.bd5c926b.js create mode 100644 assets/js/a686ca68.cc915e73.js create mode 100644 assets/js/a6c229c0.135ade25.js create mode 100644 assets/js/a7914a5c.64fb6d59.js create mode 100644 assets/js/a80d168f.2854157b.js create mode 100644 assets/js/a9ab9f8f.bdfd8b0f.js create mode 100644 assets/js/aa946361.9a9d22e0.js create mode 100644 assets/js/aacd1d40.c4e40d4c.js create mode 100644 assets/js/ac02e102.06e01491.js create 
mode 100644 assets/js/ae1efb81.ef2d54c6.js create mode 100644 assets/js/b1b6a961.74208c3e.js create mode 100644 assets/js/b24805c2.5ca38736.js create mode 100644 assets/js/b638c32b.ab27dab2.js create mode 100644 assets/js/b70bee8d.45dd4d58.js create mode 100644 assets/js/b7f60777.7085564d.js create mode 100644 assets/js/b91921d6.198b04b6.js create mode 100644 assets/js/b9d0db8e.9fbaa8a9.js create mode 100644 assets/js/ba3b9f5c.009751df.js create mode 100644 assets/js/ba9d536d.e1fae3ca.js create mode 100644 assets/js/be529d37.6621d11d.js create mode 100644 assets/js/beaba6c2.94675e66.js create mode 100644 assets/js/bfac6a8d.33d1069c.js create mode 100644 assets/js/c0e3ff8b.4348bcbf.js create mode 100644 assets/js/c16f65ec.04e07d23.js create mode 100644 assets/js/c192c597.2da137e0.js create mode 100644 assets/js/c248ee7e.56274b20.js create mode 100644 assets/js/c377a04b.e32687a6.js create mode 100644 assets/js/c3d488fa.7534f536.js create mode 100644 assets/js/c4a14462.17d186df.js create mode 100644 assets/js/c4f5d8e4.b78e8a3e.js create mode 100644 assets/js/c602cd44.c0889991.js create mode 100644 assets/js/c95b781b.54e317b5.js create mode 100644 assets/js/c9eeccbf.fa9d425c.js create mode 100644 assets/js/ca2bf8a3.b136f6a8.js create mode 100644 assets/js/ca36df4d.bdcd738d.js create mode 100644 assets/js/cac45e38.ffd6c350.js create mode 100644 assets/js/cd19d898.67faad21.js create mode 100644 assets/js/cd59f9ef.54f4605b.js create mode 100644 assets/js/d0381ee6.20083f4a.js create mode 100644 assets/js/d2282d9e.ad5da899.js create mode 100644 assets/js/d241d4ef.a87ac3a3.js create mode 100644 assets/js/d2af0b95.61dc7ae0.js create mode 100644 assets/js/d2b827bd.868ae315.js create mode 100644 assets/js/d35204c3.f3736ac4.js create mode 100644 assets/js/d40fb48f.0dd8a669.js create mode 100644 assets/js/d67a4111.aa568655.js create mode 100644 assets/js/d73efefc.77589437.js create mode 100644 assets/js/d7dfec52.b5cc8811.js create mode 100644 assets/js/d87f7f29.8b85c627.js create 
mode 100644 assets/js/d9bd3427.e1f1f230.js create mode 100644 assets/js/d9ce81b2.de1fc440.js create mode 100644 assets/js/dbc0f590.302c834a.js create mode 100644 assets/js/dc75700c.84688a9d.js create mode 100644 assets/js/dde1ff6e.936ed816.js create mode 100644 assets/js/de2621c2.5348d14a.js create mode 100644 assets/js/e109b3ff.16e87717.js create mode 100644 assets/js/e1584d63.e755620a.js create mode 100644 assets/js/e323208f.514adfe6.js create mode 100644 assets/js/e333f535.1e236bcc.js create mode 100644 assets/js/e4d0ad4d.f67d40a9.js create mode 100644 assets/js/e56c502c.7f764e63.js create mode 100644 assets/js/e6eb5527.890dde78.js create mode 100644 assets/js/e7ab2684.124a91a6.js create mode 100644 assets/js/e8ae88bc.3ee8c934.js create mode 100644 assets/js/e968e69e.7145b4d3.js create mode 100644 assets/js/e97b3564.ead957c7.js create mode 100644 assets/js/ebc40d40.75714ad4.js create mode 100644 assets/js/ee2e0a62.ea183c41.js create mode 100644 assets/js/f2954f34.e942d91a.js create mode 100644 assets/js/f2aaa4e5.df8937bd.js create mode 100644 assets/js/f35e2aba.396a2a4d.js create mode 100644 assets/js/f39642a1.dcbef318.js create mode 100644 assets/js/f7e229b3.1f12b6b5.js create mode 100644 assets/js/f8edae29.e9acd7b2.js create mode 100644 assets/js/fb969bb3.8d0af030.js create mode 100644 assets/js/fc8a86b2.1a0d2a4d.js create mode 100644 assets/js/fd2e624b.b7df8a31.js create mode 100644 assets/js/fdc5233c.73a1ac59.js create mode 100644 assets/js/fe73cc84.50771324.js create mode 100644 assets/js/fff0a46d.a0ecfadf.js create mode 100644 assets/js/main.e19c364c.js create mode 100644 assets/js/main.e19c364c.js.LICENSE.txt create mode 100644 assets/js/runtime~main.bd2548d2.js create mode 100644 demo/index.html create mode 100644 docs/0.5.0/CHANGELOG/index.html create mode 100644 docs/0.5.0/api/fastkafka/KafkaEvent/index.html create mode 100644 docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/index.html create mode 100644 docs/0.5.0/api/fastkafka/index.html create mode 
100644 docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/index.html create mode 100644 docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker/index.html create mode 100644 docs/0.5.0/api/fastkafka/testing/Tester/index.html create mode 100644 docs/0.5.0/cli/fastkafka/index.html create mode 100644 docs/0.5.0/cli/run_fastkafka_server_process/index.html create mode 100644 docs/0.5.0/guides/Guide_00_FastKafka_Demo/index.html create mode 100644 docs/0.5.0/guides/Guide_01_Intro/index.html create mode 100644 docs/0.5.0/guides/Guide_02_First_Steps/index.html create mode 100644 docs/0.5.0/guides/Guide_03_Authentication/index.html create mode 100644 docs/0.5.0/guides/Guide_04_Github_Actions_Workflow/index.html create mode 100644 docs/0.5.0/guides/Guide_05_Lifespan_Handler/index.html create mode 100644 docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka/index.html create mode 100644 docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html create mode 100644 docs/0.5.0/guides/Guide_11_Consumes_Basics/index.html create mode 100644 docs/0.5.0/guides/Guide_21_Produces_Basics/index.html create mode 100644 docs/0.5.0/guides/Guide_22_Partition_Keys/index.html create mode 100644 docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html create mode 100644 docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html create mode 100644 docs/0.5.0/index.html create mode 100644 docs/0.6.0/CHANGELOG/index.html create mode 100644 docs/0.6.0/CONTRIBUTING/index.html create mode 100644 docs/0.6.0/LICENSE/index.html create mode 100644 docs/0.6.0/api/fastkafka/EventMetadata/index.html create mode 100644 docs/0.6.0/api/fastkafka/KafkaEvent/index.html create mode 100644 docs/0.6.0/api/fastkafka/encoder/AvroBase/index.html create mode 100644 docs/0.6.0/api/fastkafka/encoder/avro_decoder/index.html create mode 100644 docs/0.6.0/api/fastkafka/encoder/avro_encoder/index.html create mode 100644 docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/index.html 
create mode 100644 docs/0.6.0/api/fastkafka/encoder/json_decoder/index.html create mode 100644 docs/0.6.0/api/fastkafka/encoder/json_encoder/index.html create mode 100644 docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor/index.html create mode 100644 docs/0.6.0/api/fastkafka/executors/SequentialExecutor/index.html create mode 100644 docs/0.6.0/api/fastkafka/index.html create mode 100644 docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/index.html create mode 100644 docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker/index.html create mode 100644 docs/0.6.0/api/fastkafka/testing/Tester/index.html create mode 100644 docs/0.6.0/cli/fastkafka/index.html create mode 100644 docs/0.6.0/cli/run_fastkafka_server_process/index.html create mode 100644 docs/0.6.0/guides/Guide_00_FastKafka_Demo/index.html create mode 100644 docs/0.6.0/guides/Guide_01_Intro/index.html create mode 100644 docs/0.6.0/guides/Guide_02_First_Steps/index.html create mode 100644 docs/0.6.0/guides/Guide_03_Authentication/index.html create mode 100644 docs/0.6.0/guides/Guide_04_Github_Actions_Workflow/index.html create mode 100644 docs/0.6.0/guides/Guide_05_Lifespan_Handler/index.html create mode 100644 docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka/index.html create mode 100644 docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html create mode 100644 docs/0.6.0/guides/Guide_11_Consumes_Basics/index.html create mode 100644 docs/0.6.0/guides/Guide_21_Produces_Basics/index.html create mode 100644 docs/0.6.0/guides/Guide_22_Partition_Keys/index.html create mode 100644 docs/0.6.0/guides/Guide_23_Batch_Producing/index.html create mode 100644 docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html create mode 100644 docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html create mode 100644 docs/0.6.0/index.html create mode 100644 docs/0.7.0/CHANGELOG/index.html create mode 100644 docs/0.7.0/CONTRIBUTING/index.html create mode 100644 
docs/0.7.0/LICENSE/index.html create mode 100644 docs/0.7.0/api/fastkafka/EventMetadata/index.html create mode 100644 docs/0.7.0/api/fastkafka/KafkaEvent/index.html create mode 100644 docs/0.7.0/api/fastkafka/encoder/AvroBase/index.html create mode 100644 docs/0.7.0/api/fastkafka/encoder/avro_decoder/index.html create mode 100644 docs/0.7.0/api/fastkafka/encoder/avro_encoder/index.html create mode 100644 docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic/index.html create mode 100644 docs/0.7.0/api/fastkafka/encoder/json_decoder/index.html create mode 100644 docs/0.7.0/api/fastkafka/encoder/json_encoder/index.html create mode 100644 docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor/index.html create mode 100644 docs/0.7.0/api/fastkafka/executors/SequentialExecutor/index.html create mode 100644 docs/0.7.0/api/fastkafka/index.html create mode 100644 docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker/index.html create mode 100644 docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker/index.html create mode 100644 docs/0.7.0/api/fastkafka/testing/Tester/index.html create mode 100644 docs/0.7.0/cli/fastkafka/index.html create mode 100644 docs/0.7.0/cli/run_fastkafka_server_process/index.html create mode 100644 docs/0.7.0/guides/Guide_00_FastKafka_Demo/index.html create mode 100644 docs/0.7.0/guides/Guide_01_Intro/index.html create mode 100644 docs/0.7.0/guides/Guide_02_First_Steps/index.html create mode 100644 docs/0.7.0/guides/Guide_03_Authentication/index.html create mode 100644 docs/0.7.0/guides/Guide_04_Github_Actions_Workflow/index.html create mode 100644 docs/0.7.0/guides/Guide_05_Lifespan_Handler/index.html create mode 100644 docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka/index.html create mode 100644 docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html create mode 100644 docs/0.7.0/guides/Guide_11_Consumes_Basics/index.html create mode 100644 docs/0.7.0/guides/Guide_12_Batch_Consuming/index.html create mode 100644 
docs/0.7.0/guides/Guide_21_Produces_Basics/index.html create mode 100644 docs/0.7.0/guides/Guide_22_Partition_Keys/index.html create mode 100644 docs/0.7.0/guides/Guide_23_Batch_Producing/index.html create mode 100644 docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html create mode 100644 docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html create mode 100644 docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html create mode 100644 docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html create mode 100644 docs/0.7.0/index.html create mode 100644 docs/0.7.1/CHANGELOG/index.html create mode 100644 docs/0.7.1/CONTRIBUTING/index.html create mode 100644 docs/0.7.1/LICENSE/index.html create mode 100644 docs/0.7.1/api/fastkafka/EventMetadata/index.html create mode 100644 docs/0.7.1/api/fastkafka/KafkaEvent/index.html create mode 100644 docs/0.7.1/api/fastkafka/encoder/AvroBase/index.html create mode 100644 docs/0.7.1/api/fastkafka/encoder/avro_decoder/index.html create mode 100644 docs/0.7.1/api/fastkafka/encoder/avro_encoder/index.html create mode 100644 docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/index.html create mode 100644 docs/0.7.1/api/fastkafka/encoder/json_decoder/index.html create mode 100644 docs/0.7.1/api/fastkafka/encoder/json_encoder/index.html create mode 100644 docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor/index.html create mode 100644 docs/0.7.1/api/fastkafka/executors/SequentialExecutor/index.html create mode 100644 docs/0.7.1/api/fastkafka/index.html create mode 100644 docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker/index.html create mode 100644 docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker/index.html create mode 100644 docs/0.7.1/api/fastkafka/testing/Tester/index.html create mode 100644 docs/0.7.1/cli/fastkafka/index.html create mode 100644 docs/0.7.1/cli/run_fastkafka_server_process/index.html create mode 100644 
docs/0.7.1/guides/Guide_00_FastKafka_Demo/index.html create mode 100644 docs/0.7.1/guides/Guide_01_Intro/index.html create mode 100644 docs/0.7.1/guides/Guide_02_First_Steps/index.html create mode 100644 docs/0.7.1/guides/Guide_03_Authentication/index.html create mode 100644 docs/0.7.1/guides/Guide_04_Github_Actions_Workflow/index.html create mode 100644 docs/0.7.1/guides/Guide_05_Lifespan_Handler/index.html create mode 100644 docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka/index.html create mode 100644 docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html create mode 100644 docs/0.7.1/guides/Guide_11_Consumes_Basics/index.html create mode 100644 docs/0.7.1/guides/Guide_12_Batch_Consuming/index.html create mode 100644 docs/0.7.1/guides/Guide_21_Produces_Basics/index.html create mode 100644 docs/0.7.1/guides/Guide_22_Partition_Keys/index.html create mode 100644 docs/0.7.1/guides/Guide_23_Batch_Producing/index.html create mode 100644 docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html create mode 100644 docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html create mode 100644 docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html create mode 100644 docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html create mode 100644 docs/0.7.1/index.html create mode 100644 docs/CHANGELOG/index.html create mode 100644 docs/CONTRIBUTING/index.html create mode 100644 docs/LICENSE/index.html create mode 100644 docs/api/fastkafka/EventMetadata/index.html create mode 100644 docs/api/fastkafka/KafkaEvent/index.html create mode 100644 docs/api/fastkafka/encoder/AvroBase/index.html create mode 100644 docs/api/fastkafka/encoder/avro_decoder/index.html create mode 100644 docs/api/fastkafka/encoder/avro_encoder/index.html create mode 100644 docs/api/fastkafka/encoder/avsc_to_pydantic/index.html create mode 100644 docs/api/fastkafka/encoder/json_decoder/index.html create mode 100644 
docs/api/fastkafka/encoder/json_encoder/index.html create mode 100644 docs/api/fastkafka/executors/DynamicTaskExecutor/index.html create mode 100644 docs/api/fastkafka/executors/SequentialExecutor/index.html create mode 100644 docs/api/fastkafka/index.html create mode 100644 docs/api/fastkafka/testing/ApacheKafkaBroker/index.html create mode 100644 docs/api/fastkafka/testing/LocalRedpandaBroker/index.html create mode 100644 docs/api/fastkafka/testing/Tester/index.html create mode 100644 docs/cli/fastkafka/index.html create mode 100644 docs/cli/run_fastkafka_server_process/index.html create mode 100644 docs/guides/Guide_00_FastKafka_Demo/index.html create mode 100644 docs/guides/Guide_01_Intro/index.html create mode 100644 docs/guides/Guide_02_First_Steps/index.html create mode 100644 docs/guides/Guide_03_Authentication/index.html create mode 100644 docs/guides/Guide_04_Github_Actions_Workflow/index.html create mode 100644 docs/guides/Guide_05_Lifespan_Handler/index.html create mode 100644 docs/guides/Guide_06_Benchmarking_FastKafka/index.html create mode 100644 docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html create mode 100644 docs/guides/Guide_11_Consumes_Basics/index.html create mode 100644 docs/guides/Guide_12_Batch_Consuming/index.html create mode 100644 docs/guides/Guide_21_Produces_Basics/index.html create mode 100644 docs/guides/Guide_22_Partition_Keys/index.html create mode 100644 docs/guides/Guide_23_Batch_Producing/index.html create mode 100644 docs/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html create mode 100644 docs/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html create mode 100644 docs/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html create mode 100644 docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html create mode 100644 docs/index.html create mode 100644 docs/next/CHANGELOG/index.html create mode 100644 docs/next/CONTRIBUTING/index.html create mode 100644 
docs/next/LICENSE/index.html create mode 100644 docs/next/api/fastkafka/EventMetadata/index.html create mode 100644 docs/next/api/fastkafka/KafkaEvent/index.html create mode 100644 docs/next/api/fastkafka/encoder/AvroBase/index.html create mode 100644 docs/next/api/fastkafka/encoder/avro_decoder/index.html create mode 100644 docs/next/api/fastkafka/encoder/avro_encoder/index.html create mode 100644 docs/next/api/fastkafka/encoder/avsc_to_pydantic/index.html create mode 100644 docs/next/api/fastkafka/encoder/json_decoder/index.html create mode 100644 docs/next/api/fastkafka/encoder/json_encoder/index.html create mode 100644 docs/next/api/fastkafka/executors/DynamicTaskExecutor/index.html create mode 100644 docs/next/api/fastkafka/executors/SequentialExecutor/index.html create mode 100644 docs/next/api/fastkafka/index.html create mode 100644 docs/next/api/fastkafka/testing/ApacheKafkaBroker/index.html create mode 100644 docs/next/api/fastkafka/testing/LocalRedpandaBroker/index.html create mode 100644 docs/next/api/fastkafka/testing/Tester/index.html create mode 100644 docs/next/cli/fastkafka/index.html create mode 100644 docs/next/cli/run_fastkafka_server_process/index.html create mode 100644 docs/next/guides/Guide_00_FastKafka_Demo/index.html create mode 100644 docs/next/guides/Guide_01_Intro/index.html create mode 100644 docs/next/guides/Guide_02_First_Steps/index.html create mode 100644 docs/next/guides/Guide_03_Authentication/index.html create mode 100644 docs/next/guides/Guide_04_Github_Actions_Workflow/index.html create mode 100644 docs/next/guides/Guide_05_Lifespan_Handler/index.html create mode 100644 docs/next/guides/Guide_06_Benchmarking_FastKafka/index.html create mode 100644 docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html create mode 100644 docs/next/guides/Guide_11_Consumes_Basics/index.html create mode 100644 docs/next/guides/Guide_12_Batch_Consuming/index.html create mode 100644 
docs/next/guides/Guide_21_Produces_Basics/index.html create mode 100644 docs/next/guides/Guide_22_Partition_Keys/index.html create mode 100644 docs/next/guides/Guide_23_Batch_Producing/index.html create mode 100644 docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html create mode 100644 docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html create mode 100644 docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html create mode 100644 docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html create mode 100644 docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka/index.html create mode 100644 docs/next/index.html create mode 100644 font/Panton-SemiBold.woff create mode 100644 font/Roboto-Light.ttf create mode 100644 font/Roboto-Regular.ttf create mode 100644 font/RobotoMono-Regular.ttf create mode 100644 font/Rubik-Medium.ttf create mode 100644 img/AIRT_icon_blue.svg create mode 100644 img/I.svg create mode 100644 img/Y_Combinator_Logo.png create mode 100644 img/a-alphabet-round-icon.png create mode 100644 img/compass-outline.png create mode 100644 img/home-icon.svg create mode 100644 img/icon-arrow-right-blue.svg create mode 100644 img/icon-discord.svg create mode 100644 img/icon-facebook.svg create mode 100644 img/icon-github.svg create mode 100644 img/icon-linkedin.svg create mode 100644 img/icon-twitter.svg create mode 100644 img/p-alphabet-round-icon.png create mode 100644 img/prototype.svg create mode 100644 img/reddit-logo.png create mode 100644 img/robot-footer.svg create mode 100644 img/robot-hero.svg create mode 100644 img/streamline.svg create mode 100644 img/twitter-logo.svg create mode 100644 img/undraw_docusaurus_mountain.svg create mode 100644 img/undraw_docusaurus_react.svg create mode 100644 img/undraw_docusaurus_tree.svg create mode 100644 img/write.svg create mode 100644 index.html create mode 100644 opensearch.xml create mode 100644 search/index.html create mode 100644 
sitemap.xml diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/404.html b/404.html new file mode 100644 index 0000000..031fe6a --- /dev/null +++ b/404.html @@ -0,0 +1,32 @@ + + + + + +Page Not Found | FastKafka + + + + + + + + + + +
+
Skip to main content

Page Not Found

We could not find what you were looking for.

Please contact the owner of the site that linked you to the original URL and let them know their link is broken.

+ + + + \ No newline at end of file diff --git a/CNAME b/CNAME new file mode 100644 index 0000000..91a0c5a --- /dev/null +++ b/CNAME @@ -0,0 +1 @@ +fastkafka.airt.ai diff --git a/assets/css/styles.afdeec1f.css b/assets/css/styles.afdeec1f.css new file mode 100644 index 0000000..985239a --- /dev/null +++ b/assets/css/styles.afdeec1f.css @@ -0,0 +1 @@ +.col,.container{padding:0 var(--ifm-spacing-horizontal);width:100%}.markdown>h2,.markdown>h3,.markdown>h4,.markdown>h5,.markdown>h6{margin-bottom:calc(var(--ifm-heading-vertical-rhythm-bottom)*var(--ifm-leading))}.markdown li,body{word-wrap:break-word}body,ol ol,ol ul,ul ol,ul ul{margin:0}pre,table{overflow:auto}blockquote,pre{margin:0 0 var(--ifm-spacing-vertical)}.breadcrumbs__link,.button{transition-timing-function:var(--ifm-transition-timing-default)}.button,code{vertical-align:middle}.button--outline.button--active,.button--outline:active,.button--outline:hover,:root{--ifm-button-color:var(--ifm-font-color-base-inverse)}.menu__link:hover,a{transition:color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.navbar--dark,:root{--ifm-navbar-link-hover-color:var(--ifm-color-primary)}.menu,.navbar-sidebar{overflow-x:hidden}:root,html[data-theme=dark]{--ifm-color-emphasis-500:var(--ifm-color-gray-500)}.toggleButton_gllP,html{-webkit-tap-highlight-color:transparent}*,.DocSearch-Container,.DocSearch-Container 
*{box-sizing:border-box}:root{--ifm-color-scheme:light;--ifm-dark-value:10%;--ifm-darker-value:15%;--ifm-darkest-value:30%;--ifm-light-value:15%;--ifm-lighter-value:30%;--ifm-lightest-value:50%;--ifm-contrast-background-value:90%;--ifm-contrast-foreground-value:70%;--ifm-contrast-background-dark-value:70%;--ifm-contrast-foreground-dark-value:90%;--ifm-color-primary:#3578e5;--ifm-color-secondary:#ebedf0;--ifm-color-success:#00a400;--ifm-color-info:#54c7ec;--ifm-color-warning:#ffba00;--ifm-color-danger:#fa383e;--ifm-color-primary-dark:#306cce;--ifm-color-primary-darker:#2d66c3;--ifm-color-primary-darkest:#2554a0;--ifm-color-primary-light:#538ce9;--ifm-color-primary-lighter:#72a1ed;--ifm-color-primary-lightest:#9abcf2;--ifm-color-primary-contrast-background:#ebf2fc;--ifm-color-primary-contrast-foreground:#102445;--ifm-color-secondary-dark:#d4d5d8;--ifm-color-secondary-darker:#c8c9cc;--ifm-color-secondary-darkest:#a4a6a8;--ifm-color-secondary-light:#eef0f2;--ifm-color-secondary-lighter:#f1f2f5;--ifm-color-secondary-lightest:#f5f6f8;--ifm-color-secondary-contrast-background:#fdfdfe;--ifm-color-secondary-contrast-foreground:#474748;--ifm-color-success-dark:#009400;--ifm-color-success-darker:#008b00;--ifm-color-success-darkest:#007300;--ifm-color-success-light:#26b226;--ifm-color-success-lighter:#4dbf4d;--ifm-color-success-lightest:#80d280;--ifm-color-success-contrast-background:#e6f6e6;--ifm-color-success-contrast-foreground:#003100;--ifm-color-info-dark:#4cb3d4;--ifm-color-info-darker:#47a9c9;--ifm-color-info-darkest:#3b8ba5;--ifm-color-info-light:#6ecfef;--ifm-color-info-lighter:#87d8f2;--ifm-color-info-lightest:#aae3f6;--ifm-color-info-contrast-background:#eef9fd;--ifm-color-info-contrast-foreground:#193c47;--ifm-color-warning-dark:#e6a700;--ifm-color-warning-darker:#d99e00;--ifm-color-warning-darkest:#b38200;--ifm-color-warning-light:#ffc426;--ifm-color-warning-lighter:#ffcf4d;--ifm-color-warning-lightest:#ffdd80;--ifm-color-warning-contrast-background:#fff8e6;--ifm-c
olor-warning-contrast-foreground:#4d3800;--ifm-color-danger-dark:#e13238;--ifm-color-danger-darker:#d53035;--ifm-color-danger-darkest:#af272b;--ifm-color-danger-light:#fb565b;--ifm-color-danger-lighter:#fb7478;--ifm-color-danger-lightest:#fd9c9f;--ifm-color-danger-contrast-background:#ffebec;--ifm-color-danger-contrast-foreground:#4b1113;--ifm-color-white:#fff;--ifm-color-black:#000;--ifm-color-gray-0:var(--ifm-color-white);--ifm-color-gray-100:#f5f6f7;--ifm-color-gray-200:#ebedf0;--ifm-color-gray-300:#dadde1;--ifm-color-gray-400:#ccd0d5;--ifm-color-gray-500:#bec3c9;--ifm-color-gray-600:#8d949e;--ifm-color-gray-700:#606770;--ifm-color-gray-800:#444950;--ifm-color-gray-900:#1c1e21;--ifm-color-gray-1000:var(--ifm-color-black);--ifm-color-emphasis-0:var(--ifm-color-gray-0);--ifm-color-emphasis-100:var(--ifm-color-gray-100);--ifm-color-emphasis-200:var(--ifm-color-gray-200);--ifm-color-emphasis-300:var(--ifm-color-gray-300);--ifm-color-emphasis-400:var(--ifm-color-gray-400);--ifm-color-emphasis-600:var(--ifm-color-gray-600);--ifm-color-emphasis-700:var(--ifm-color-gray-700);--ifm-color-emphasis-800:var(--ifm-color-gray-800);--ifm-color-emphasis-900:var(--ifm-color-gray-900);--ifm-color-emphasis-1000:var(--ifm-color-gray-1000);--ifm-color-content:var(--ifm-color-emphasis-900);--ifm-color-content-inverse:var(--ifm-color-emphasis-0);--ifm-color-content-secondary:#525860;--ifm-background-color:#0000;--ifm-background-surface-color:var(--ifm-color-content-inverse);--ifm-global-border-width:1px;--ifm-global-radius:0.4rem;--ifm-hover-overlay:#0000000d;--ifm-font-color-base:var(--ifm-color-content);--ifm-font-color-base-inverse:var(--ifm-color-content-inverse);--ifm-font-color-secondary:var(--ifm-color-content-secondary);--ifm-font-family-base:system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,BlinkMacSystemFont,"Segoe UI",Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI 
Symbol";--ifm-font-family-monospace:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--ifm-font-size-base:100%;--ifm-font-weight-light:300;--ifm-font-weight-normal:400;--ifm-font-weight-semibold:500;--ifm-font-weight-bold:700;--ifm-font-weight-base:var(--ifm-font-weight-normal);--ifm-line-height-base:1.65;--ifm-global-spacing:1rem;--ifm-spacing-vertical:var(--ifm-global-spacing);--ifm-spacing-horizontal:var(--ifm-global-spacing);--ifm-transition-fast:200ms;--ifm-transition-slow:400ms;--ifm-transition-timing-default:cubic-bezier(0.08,0.52,0.52,1);--ifm-global-shadow-lw:0 1px 2px 0 #0000001a;--ifm-global-shadow-md:0 5px 40px #0003;--ifm-global-shadow-tl:0 12px 28px 0 #0003,0 2px 4px 0 #0000001a;--ifm-z-index-dropdown:100;--ifm-z-index-fixed:200;--ifm-z-index-overlay:400;--ifm-container-width:1140px;--ifm-container-width-xl:1320px;--ifm-code-background:#f6f7f8;--ifm-code-border-radius:var(--ifm-global-radius);--ifm-code-font-size:90%;--ifm-code-padding-horizontal:0.1rem;--ifm-code-padding-vertical:0.1rem;--ifm-pre-background:var(--ifm-code-background);--ifm-pre-border-radius:var(--ifm-code-border-radius);--ifm-pre-color:inherit;--ifm-pre-line-height:1.45;--ifm-pre-padding:1rem;--ifm-heading-color:inherit;--ifm-heading-margin-top:0;--ifm-heading-margin-bottom:var(--ifm-spacing-vertical);--ifm-heading-font-family:var(--ifm-font-family-base);--ifm-heading-font-weight:var(--ifm-font-weight-bold);--ifm-heading-line-height:1.25;--ifm-h1-font-size:2rem;--ifm-h2-font-size:1.5rem;--ifm-h3-font-size:1.25rem;--ifm-h4-font-size:1rem;--ifm-h5-font-size:0.875rem;--ifm-h6-font-size:0.85rem;--ifm-image-alignment-padding:1.25rem;--ifm-leading-desktop:1.25;--ifm-leading:calc(var(--ifm-leading-desktop)*1rem);--ifm-list-left-padding:2rem;--ifm-list-margin:1rem;--ifm-list-item-margin:0.25rem;--ifm-list-paragraph-margin:1rem;--ifm-table-cell-padding:0.75rem;--ifm-table-background:#0000;--ifm-table-stripe-background:#00000008;--ifm-table-border-width:1px;--ifm-t
able-border-color:var(--ifm-color-emphasis-300);--ifm-table-head-background:inherit;--ifm-table-head-color:inherit;--ifm-table-head-font-weight:var(--ifm-font-weight-bold);--ifm-table-cell-color:inherit;--ifm-link-color:var(--ifm-color-primary);--ifm-link-decoration:none;--ifm-link-hover-color:var(--ifm-link-color);--ifm-link-hover-decoration:underline;--ifm-paragraph-margin-bottom:var(--ifm-leading);--ifm-blockquote-font-size:var(--ifm-font-size-base);--ifm-blockquote-border-left-width:2px;--ifm-blockquote-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-blockquote-padding-vertical:0;--ifm-blockquote-shadow:none;--ifm-blockquote-color:var(--ifm-color-emphasis-800);--ifm-blockquote-border-color:var(--ifm-color-emphasis-300);--ifm-hr-background-color:var(--ifm-color-emphasis-500);--ifm-hr-height:1px;--ifm-hr-margin-vertical:1.5rem;--ifm-scrollbar-size:7px;--ifm-scrollbar-track-background-color:#f1f1f1;--ifm-scrollbar-thumb-background-color:silver;--ifm-scrollbar-thumb-hover-background-color:#a7a7a7;--ifm-alert-background-color:inherit;--ifm-alert-border-color:inherit;--ifm-alert-border-radius:var(--ifm-global-radius);--ifm-alert-border-width:0px;--ifm-alert-border-left-width:5px;--ifm-alert-color:var(--ifm-font-color-base);--ifm-alert-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-alert-padding-vertical:var(--ifm-spacing-vertical);--ifm-alert-shadow:var(--ifm-global-shadow-lw);--ifm-avatar-intro-margin:1rem;--ifm-avatar-intro-alignment:inherit;--ifm-avatar-photo-size:3rem;--ifm-badge-background-color:inherit;--ifm-badge-border-color:inherit;--ifm-badge-border-radius:var(--ifm-global-radius);--ifm-badge-border-width:var(--ifm-global-border-width);--ifm-badge-color:var(--ifm-color-white);--ifm-badge-padding-horizontal:calc(var(--ifm-spacing-horizontal)*0.5);--ifm-badge-padding-vertical:calc(var(--ifm-spacing-vertical)*0.25);--ifm-breadcrumb-border-radius:1.5rem;--ifm-breadcrumb-spacing:0.5rem;--ifm-breadcrumb-color-active:var(--ifm-color-primary);--ifm-br
eadcrumb-item-background-active:var(--ifm-hover-overlay);--ifm-breadcrumb-padding-horizontal:0.8rem;--ifm-breadcrumb-padding-vertical:0.4rem;--ifm-breadcrumb-size-multiplier:1;--ifm-breadcrumb-separator:url('data:image/svg+xml;utf8,');--ifm-breadcrumb-separator-filter:none;--ifm-breadcrumb-separator-size:0.5rem;--ifm-breadcrumb-separator-size-multiplier:1.25;--ifm-button-background-color:inherit;--ifm-button-border-color:var(--ifm-button-background-color);--ifm-button-border-width:var(--ifm-global-border-width);--ifm-button-font-weight:var(--ifm-font-weight-bold);--ifm-button-padding-horizontal:1.5rem;--ifm-button-padding-vertical:0.375rem;--ifm-button-size-multiplier:1;--ifm-button-transition-duration:var(--ifm-transition-fast);--ifm-button-border-radius:calc(var(--ifm-global-radius)*var(--ifm-button-size-multiplier));--ifm-button-group-spacing:2px;--ifm-card-background-color:var(--ifm-background-surface-color);--ifm-card-border-radius:calc(var(--ifm-global-radius)*2);--ifm-card-horizontal-spacing:var(--ifm-global-spacing);--ifm-card-vertical-spacing:var(--ifm-global-spacing);--ifm-toc-border-color:var(--ifm-color-emphasis-300);--ifm-toc-link-color:var(--ifm-color-content-secondary);--ifm-toc-padding-vertical:0.5rem;--ifm-toc-padding-horizontal:0.5rem;--ifm-dropdown-background-color:var(--ifm-background-surface-color);--ifm-dropdown-font-weight:var(--ifm-font-weight-semibold);--ifm-dropdown-link-color:var(--ifm-font-color-base);--ifm-dropdown-hover-background-color:var(--ifm-hover-overlay);--ifm-footer-background-color:var(--ifm-color-emphasis-100);--ifm-footer-color:inherit;--ifm-footer-link-color:var(--ifm-color-emphasis-700);--ifm-footer-link-hover-color:var(--ifm-color-primary);--ifm-footer-link-horizontal-spacing:0.5rem;--ifm-footer-padding-horizontal:calc(var(--ifm-spacing-horizontal)*2);--ifm-footer-padding-vertical:calc(var(--ifm-spacing-vertical)*2);--ifm-footer-title-color:inherit;--ifm-footer-logo-max-width:min(30rem,90vw);--ifm-hero-background-color:var
(--ifm-background-surface-color);--ifm-hero-text-color:var(--ifm-color-emphasis-800);--ifm-menu-color:var(--ifm-color-emphasis-700);--ifm-menu-color-active:var(--ifm-color-primary);--ifm-menu-color-background-active:var(--ifm-hover-overlay);--ifm-menu-color-background-hover:var(--ifm-hover-overlay);--ifm-menu-link-padding-horizontal:0.75rem;--ifm-menu-link-padding-vertical:0.375rem;--ifm-menu-link-sublist-icon:url('data:image/svg+xml;utf8,');--ifm-menu-link-sublist-icon-filter:none;--ifm-navbar-background-color:var(--ifm-background-surface-color);--ifm-navbar-height:3.75rem;--ifm-navbar-item-padding-horizontal:0.75rem;--ifm-navbar-item-padding-vertical:0.25rem;--ifm-navbar-link-color:var(--ifm-font-color-base);--ifm-navbar-link-active-color:var(--ifm-link-color);--ifm-navbar-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-navbar-padding-vertical:calc(var(--ifm-spacing-vertical)*0.5);--ifm-navbar-shadow:var(--ifm-global-shadow-lw);--ifm-navbar-search-input-background-color:var(--ifm-color-emphasis-200);--ifm-navbar-search-input-color:var(--ifm-color-emphasis-800);--ifm-navbar-search-input-placeholder-color:var(--ifm-color-emphasis-500);--ifm-navbar-search-input-icon:url('data:image/svg+xml;utf8,');--ifm-navbar-sidebar-width:83vw;--ifm-pagination-border-radius:var(--ifm-global-radius);--ifm-pagination-color-active:var(--ifm-color-primary);--ifm-pagination-font-size:1rem;--ifm-pagination-item-active-background:var(--ifm-hover-overlay);--ifm-pagination-page-spacing:0.2em;--ifm-pagination-padding-horizontal:calc(var(--ifm-spacing-horizontal)*1);--ifm-pagination-padding-vertical:calc(var(--ifm-spacing-vertical)*0.25);--ifm-pagination-nav-border-radius:var(--ifm-global-radius);--ifm-pagination-nav-color-hover:var(--ifm-color-primary);--ifm-pills-color-active:var(--ifm-color-primary);--ifm-pills-color-background-active:var(--ifm-hover-overlay);--ifm-pills-spacing:0.125rem;--ifm-tabs-color:var(--ifm-font-color-secondary);--ifm-tabs-color-active:var(--ifm-color-primary
);--ifm-tabs-color-active-border:var(--ifm-tabs-color-active);--ifm-tabs-padding-horizontal:1rem;--ifm-tabs-padding-vertical:1rem}:root,[data-theme=dark]{--ifm-color-primary:#56b7e1;--ifm-color-primary-dark:#3cacdc;--ifm-color-primary-darker:#2ea6da;--ifm-color-primary-darkest:#218bb9;--ifm-color-primary-light:#70c2e6;--ifm-color-primary-lighter:#7ec8e8;--ifm-color-primary-lightest:#a5d9ef}.badge--danger,.badge--info,.badge--primary,.badge--secondary,.badge--success,.badge--warning{--ifm-badge-border-color:var(--ifm-badge-background-color)}.button--link,.button--outline{--ifm-button-background-color:#0000}html{-webkit-font-smoothing:antialiased;-webkit-text-size-adjust:100%;text-size-adjust:100%;background-color:var(--ifm-background-color);color:var(--ifm-font-color-base);color-scheme:var(--ifm-color-scheme);font:var(--ifm-font-size-base)/var(--ifm-line-height-base) var(--ifm-font-family-base);text-rendering:optimizelegibility}iframe{border:0;color-scheme:auto}.container{margin:0 auto;max-width:var(--ifm-container-width)}.container--fluid{max-width:inherit}.row{display:flex;flex-wrap:wrap;margin:0 calc(var(--ifm-spacing-horizontal)*-1)}.margin-bottom--none,.margin-vert--none,.markdown>:last-child{margin-bottom:0!important}.margin-top--none,.margin-vert--none{margin-top:0!important}.row--no-gutters{margin-left:0;margin-right:0}.margin-horiz--none,.margin-right--none{margin-right:0!important}.row--no-gutters>.col{padding-left:0;padding-right:0}.row--align-top{align-items:flex-start}.row--align-bottom{align-items:flex-end}.menuExternalLink_NmtK,.row--align-center{align-items:center}.row--align-stretch{align-items:stretch}.row--align-baseline{align-items:baseline}.col{--ifm-col-width:100%;flex:1 
0;margin-left:0;max-width:var(--ifm-col-width)}.padding-bottom--none,.padding-vert--none{padding-bottom:0!important}.padding-top--none,.padding-vert--none{padding-top:0!important}.padding-horiz--none,.padding-left--none{padding-left:0!important}.padding-horiz--none,.padding-right--none{padding-right:0!important}.col[class*=col--]{flex:0 0 var(--ifm-col-width)}.col--1{--ifm-col-width:8.33333%}.col--offset-1{margin-left:8.33333%}.col--2{--ifm-col-width:16.66667%}.col--offset-2{margin-left:16.66667%}.col--3{--ifm-col-width:25%}.col--offset-3{margin-left:25%}.col--4{--ifm-col-width:33.33333%}.col--offset-4{margin-left:33.33333%}.col--5{--ifm-col-width:41.66667%}.col--offset-5{margin-left:41.66667%}.col--6{--ifm-col-width:50%}.col--offset-6{margin-left:50%}.col--7{--ifm-col-width:58.33333%}.col--offset-7{margin-left:58.33333%}.col--8{--ifm-col-width:66.66667%}.col--offset-8{margin-left:66.66667%}.col--9{--ifm-col-width:75%}.col--offset-9{margin-left:75%}.col--10{--ifm-col-width:83.33333%}.col--offset-10{margin-left:83.33333%}.col--11{--ifm-col-width:91.66667%}.col--offset-11{margin-left:91.66667%}.col--12{--ifm-col-width:100%}.col--offset-12{margin-left:100%}.margin-horiz--none,.margin-left--none{margin-left:0!important}.margin--none{margin:0!important}.margin-bottom--xs,.margin-vert--xs{margin-bottom:.25rem!important}.margin-top--xs,.margin-vert--xs{margin-top:.25rem!important}.margin-horiz--xs,.margin-left--xs{margin-left:.25rem!important}.margin-horiz--xs,.margin-right--xs{margin-right:.25rem!important}.margin--xs{margin:.25rem!important}.margin-bottom--sm,.margin-vert--sm{margin-bottom:.5rem!important}.margin-top--sm,.margin-vert--sm{margin-top:.5rem!important}.margin-horiz--sm,.margin-left--sm{margin-left:.5rem!important}.margin-horiz--sm,.margin-right--sm{margin-right:.5rem!important}.margin--sm{margin:.5rem!important}.margin-bottom--md,.margin-vert--md{margin-bottom:1rem!important}.margin-top--md,.margin-vert--md{margin-top:1rem!important}.margin-horiz--md,.margin
-left--md{margin-left:1rem!important}.margin-horiz--md,.margin-right--md{margin-right:1rem!important}.margin--md{margin:1rem!important}.margin-bottom--lg,.margin-vert--lg{margin-bottom:2rem!important}.margin-top--lg,.margin-vert--lg{margin-top:2rem!important}.margin-horiz--lg,.margin-left--lg{margin-left:2rem!important}.margin-horiz--lg,.margin-right--lg{margin-right:2rem!important}.margin--lg{margin:2rem!important}.margin-bottom--xl,.margin-vert--xl{margin-bottom:5rem!important}.margin-top--xl,.margin-vert--xl{margin-top:5rem!important}.margin-horiz--xl,.margin-left--xl{margin-left:5rem!important}.margin-horiz--xl,.margin-right--xl{margin-right:5rem!important}.margin--xl{margin:5rem!important}.padding--none{padding:0!important}.padding-bottom--xs,.padding-vert--xs{padding-bottom:.25rem!important}.padding-top--xs,.padding-vert--xs{padding-top:.25rem!important}.padding-horiz--xs,.padding-left--xs{padding-left:.25rem!important}.padding-horiz--xs,.padding-right--xs{padding-right:.25rem!important}.padding--xs{padding:.25rem!important}.padding-bottom--sm,.padding-vert--sm{padding-bottom:.5rem!important}.padding-top--sm,.padding-vert--sm{padding-top:.5rem!important}.padding-horiz--sm,.padding-left--sm{padding-left:.5rem!important}.padding-horiz--sm,.padding-right--sm{padding-right:.5rem!important}.padding--sm{padding:.5rem!important}.padding-bottom--md,.padding-vert--md{padding-bottom:1rem!important}.padding-top--md,.padding-vert--md{padding-top:1rem!important}.padding-horiz--md,.padding-left--md{padding-left:1rem!important}.padding-horiz--md,.padding-right--md{padding-right:1rem!important}.padding--md{padding:1rem!important}.padding-bottom--lg,.padding-vert--lg{padding-bottom:2rem!important}.padding-top--lg,.padding-vert--lg{padding-top:2rem!important}.padding-horiz--lg,.padding-left--lg{padding-left:2rem!important}.padding-horiz--lg,.padding-right--lg{padding-right:2rem!important}.padding--lg{padding:2rem!important}.padding-bottom--xl,.padding-vert--xl{padding-bottom:5r
em!important}.padding-top--xl,.padding-vert--xl{padding-top:5rem!important}.padding-horiz--xl,.padding-left--xl{padding-left:5rem!important}.padding-horiz--xl,.padding-right--xl{padding-right:5rem!important}.padding--xl{padding:5rem!important}code{background-color:var(--ifm-code-background);border:.1rem solid #0000001a;border-radius:var(--ifm-code-border-radius);font-family:var(--ifm-font-family-monospace);font-size:var(--ifm-code-font-size);padding:var(--ifm-code-padding-vertical) var(--ifm-code-padding-horizontal)}a code{color:inherit}pre{background-color:var(--ifm-pre-background);border-radius:var(--ifm-pre-border-radius);color:var(--ifm-pre-color);font:var(--ifm-code-font-size)/var(--ifm-pre-line-height) var(--ifm-font-family-monospace);padding:var(--ifm-pre-padding)}pre code{background-color:initial;border:none;font-size:100%;line-height:inherit;padding:0}kbd{background-color:var(--ifm-color-emphasis-0);border:1px solid var(--ifm-color-emphasis-400);border-radius:.2rem;box-shadow:inset 0 -1px 0 var(--ifm-color-emphasis-400);color:var(--ifm-color-emphasis-800);font:80% var(--ifm-font-family-monospace);padding:.15rem .3rem}h1,h2,h3,h4,h5,h6{color:var(--ifm-heading-color);font-family:var(--ifm-heading-font-family);font-weight:var(--ifm-heading-font-weight);line-height:var(--ifm-heading-line-height);margin:var(--ifm-heading-margin-top) 0 var(--ifm-heading-margin-bottom) 
0}h1{font-size:var(--ifm-h1-font-size)}h2{font-size:var(--ifm-h2-font-size)}h3{font-size:var(--ifm-h3-font-size)}h4{font-size:var(--ifm-h4-font-size)}h5{font-size:var(--ifm-h5-font-size)}h6{font-size:var(--ifm-h6-font-size)}img{max-width:100%}img[align=right]{padding-left:var(--image-alignment-padding)}img[align=left]{padding-right:var(--image-alignment-padding)}.markdown{--ifm-h1-vertical-rhythm-top:3;--ifm-h2-vertical-rhythm-top:2;--ifm-h3-vertical-rhythm-top:1.5;--ifm-heading-vertical-rhythm-top:1.25;--ifm-h1-vertical-rhythm-bottom:1.25;--ifm-heading-vertical-rhythm-bottom:1}.markdown:after,.markdown:before{content:"";display:table}.markdown:after{clear:both}.markdown h1:first-child{--ifm-h1-font-size:3rem;margin-bottom:calc(var(--ifm-h1-vertical-rhythm-bottom)*var(--ifm-leading))}.markdown>h2{--ifm-h2-font-size:2rem;margin-top:calc(var(--ifm-h2-vertical-rhythm-top)*var(--ifm-leading))}.markdown>h3{--ifm-h3-font-size:1.5rem;margin-top:calc(var(--ifm-h3-vertical-rhythm-top)*var(--ifm-leading))}.markdown>h4,.markdown>h5,.markdown>h6{margin-top:calc(var(--ifm-heading-vertical-rhythm-top)*var(--ifm-leading))}.markdown>p,.markdown>pre,.markdown>ul{margin-bottom:var(--ifm-leading)}.markdown li>p{margin-top:var(--ifm-list-paragraph-margin)}.markdown li+li{margin-top:var(--ifm-list-item-margin)}ol,ul{margin:0 0 var(--ifm-list-margin);padding-left:var(--ifm-list-left-padding)}ol ol,ul ol{list-style-type:lower-roman}ol ol ol,ol ul ol,ul ol ol,ul ul ol{list-style-type:lower-alpha}table{border-collapse:collapse;display:block;margin-bottom:var(--ifm-spacing-vertical)}table thead tr{border-bottom:2px solid var(--ifm-table-border-color)}table thead,table tr:nth-child(2n){background-color:var(--ifm-table-stripe-background)}table tr{background-color:var(--ifm-table-background);border-top:var(--ifm-table-border-width) solid var(--ifm-table-border-color)}table td,table th{border:var(--ifm-table-border-width) solid 
var(--ifm-table-border-color);padding:var(--ifm-table-cell-padding)}table th{background-color:var(--ifm-table-head-background);color:var(--ifm-table-head-color);font-weight:var(--ifm-table-head-font-weight)}table td{color:var(--ifm-table-cell-color)}strong{font-weight:var(--ifm-font-weight-bold)}a{color:var(--ifm-link-color);text-decoration:var(--ifm-link-decoration)}a:hover{color:var(--ifm-link-hover-color);text-decoration:var(--ifm-link-hover-decoration)}.button:hover,.text--no-decoration,.text--no-decoration:hover,a:not([href]){text-decoration:none}p{margin:0 0 var(--ifm-paragraph-margin-bottom)}blockquote{border-left:var(--ifm-blockquote-border-left-width) solid var(--ifm-blockquote-border-color);box-shadow:var(--ifm-blockquote-shadow);color:var(--ifm-blockquote-color);font-size:var(--ifm-blockquote-font-size);padding:var(--ifm-blockquote-padding-vertical) var(--ifm-blockquote-padding-horizontal)}blockquote>:first-child{margin-top:0}blockquote>:last-child{margin-bottom:0}hr{background-color:var(--ifm-hr-background-color);border:0;height:var(--ifm-hr-height);margin:var(--ifm-hr-margin-vertical) 0}.shadow--lw{box-shadow:var(--ifm-global-shadow-lw)!important}.shadow--md{box-shadow:var(--ifm-global-shadow-md)!important}.shadow--tl{box-shadow:var(--ifm-global-shadow-tl)!important}.text--primary,.wordWrapButtonEnabled_EoeP 
.wordWrapButtonIcon_Bwma{color:var(--ifm-color-primary)}.text--secondary{color:var(--ifm-color-secondary)}.text--success{color:var(--ifm-color-success)}.text--info{color:var(--ifm-color-info)}.text--warning{color:var(--ifm-color-warning)}.text--danger{color:var(--ifm-color-danger)}.text--center{text-align:center}.text--left{text-align:left}.text--justify{text-align:justify}.text--right{text-align:right}.text--capitalize{text-transform:capitalize}.text--lowercase{text-transform:lowercase}.admonitionHeading_tbUL,.alert__heading,.text--uppercase{text-transform:uppercase}.text--light{font-weight:var(--ifm-font-weight-light)}.text--normal{font-weight:var(--ifm-font-weight-normal)}.text--semibold{font-weight:var(--ifm-font-weight-semibold)}.text--bold{font-weight:var(--ifm-font-weight-bold)}.text--italic{font-style:italic}.text--truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.text--break{word-wrap:break-word!important;word-break:break-word!important}.clean-btn{background:none;border:none;color:inherit;cursor:pointer;font-family:inherit;padding:0}.alert,.alert 
.close{color:var(--ifm-alert-foreground-color)}.clean-list{list-style:none;padding-left:0}.alert--primary{--ifm-alert-background-color:var(--ifm-color-primary-contrast-background);--ifm-alert-background-color-highlight:#3578e526;--ifm-alert-foreground-color:var(--ifm-color-primary-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-primary-dark)}.alert--secondary{--ifm-alert-background-color:var(--ifm-color-secondary-contrast-background);--ifm-alert-background-color-highlight:#ebedf026;--ifm-alert-foreground-color:var(--ifm-color-secondary-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-secondary-dark)}.alert--success{--ifm-alert-background-color:var(--ifm-color-success-contrast-background);--ifm-alert-background-color-highlight:#00a40026;--ifm-alert-foreground-color:var(--ifm-color-success-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-success-dark)}.alert--info{--ifm-alert-background-color:var(--ifm-color-info-contrast-background);--ifm-alert-background-color-highlight:#54c7ec26;--ifm-alert-foreground-color:var(--ifm-color-info-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-info-dark)}.alert--warning{--ifm-alert-background-color:var(--ifm-color-warning-contrast-background);--ifm-alert-background-color-highlight:#ffba0026;--ifm-alert-foreground-color:var(--ifm-color-warning-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-warning-dark)}.alert--danger{--ifm-alert-background-color:var(--ifm-color-danger-contrast-background);--ifm-alert-background-color-highlight:#fa383e26;--ifm-alert-foreground-color:var(--ifm-color-danger-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-danger-dark)}.alert{--ifm-code-background:var(--ifm-alert-background-color-highlight);--ifm-link-color:var(--ifm-alert-foreground-color);--ifm-link-hover-color:var(--ifm-alert-foreground-color);--ifm-link-decoration:underline;--ifm-tabs-color:var(--ifm-alert-foreground-color);--ifm-tabs-color-active:var(--ifm-alert-foregr
ound-color);--ifm-tabs-color-active-border:var(--ifm-alert-border-color);background-color:var(--ifm-alert-background-color);border:var(--ifm-alert-border-width) solid var(--ifm-alert-border-color);border-left-width:var(--ifm-alert-border-left-width);border-radius:var(--ifm-alert-border-radius);box-shadow:var(--ifm-alert-shadow);padding:var(--ifm-alert-padding-vertical) var(--ifm-alert-padding-horizontal)}.alert__heading{align-items:center;display:flex;font:700 var(--ifm-h5-font-size)/var(--ifm-heading-line-height) var(--ifm-heading-font-family);margin-bottom:.5rem}.alert__icon{display:inline-flex;margin-right:.4em}.alert__icon svg{fill:var(--ifm-alert-foreground-color);stroke:var(--ifm-alert-foreground-color);stroke-width:0}.alert .close{margin:calc(var(--ifm-alert-padding-vertical)*-1) calc(var(--ifm-alert-padding-horizontal)*-1) 0 0;opacity:.75}.alert .close:focus,.alert .close:hover{opacity:1}.alert a{text-decoration-color:var(--ifm-alert-border-color)}.alert a:hover{text-decoration-thickness:2px}.avatar{column-gap:var(--ifm-avatar-intro-margin);display:flex}.avatar__photo{border-radius:50%;display:block;height:var(--ifm-avatar-photo-size);overflow:hidden;width:var(--ifm-avatar-photo-size)}.card--full-height,.navbar__logo img,body,html{height:100%}.avatar__photo--sm{--ifm-avatar-photo-size:2rem}.avatar__photo--lg{--ifm-avatar-photo-size:4rem}.avatar__photo--xl{--ifm-avatar-photo-size:6rem}.avatar__intro{display:flex;flex:1 1;flex-direction:column;justify-content:center;text-align:var(--ifm-avatar-intro-alignment)}.badge,.breadcrumbs__item,.breadcrumbs__link,.button,.dropdown>.navbar__link:after{display:inline-block}.avatar__name{font:700 var(--ifm-h4-font-size)/var(--ifm-heading-line-height) 
var(--ifm-font-family-base)}.avatar__subtitle{margin-top:.25rem}.avatar--vertical{--ifm-avatar-intro-alignment:center;--ifm-avatar-intro-margin:0.5rem;align-items:center;flex-direction:column}.badge{background-color:var(--ifm-badge-background-color);border:var(--ifm-badge-border-width) solid var(--ifm-badge-border-color);border-radius:var(--ifm-badge-border-radius);color:var(--ifm-badge-color);font-size:75%;font-weight:var(--ifm-font-weight-bold);line-height:1;padding:var(--ifm-badge-padding-vertical) var(--ifm-badge-padding-horizontal)}.badge--primary{--ifm-badge-background-color:var(--ifm-color-primary)}.badge--secondary{--ifm-badge-background-color:var(--ifm-color-secondary);color:var(--ifm-color-black)}.breadcrumbs__link,.button.button--secondary.button--outline:not(.button--active):not(:hover){color:var(--ifm-font-color-base)}.badge--success{--ifm-badge-background-color:var(--ifm-color-success)}.badge--info{--ifm-badge-background-color:var(--ifm-color-info)}.badge--warning{--ifm-badge-background-color:var(--ifm-color-warning)}.badge--danger{--ifm-badge-background-color:var(--ifm-color-danger)}.breadcrumbs{margin-bottom:0;padding-left:0}.breadcrumbs__item:not(:last-child):after{background:var(--ifm-breadcrumb-separator) center;content:" ";display:inline-block;filter:var(--ifm-breadcrumb-separator-filter);height:calc(var(--ifm-breadcrumb-separator-size)*var(--ifm-breadcrumb-size-multiplier)*var(--ifm-breadcrumb-separator-size-multiplier));margin:0 var(--ifm-breadcrumb-spacing);opacity:.5;width:calc(var(--ifm-breadcrumb-separator-size)*var(--ifm-breadcrumb-size-multiplier)*var(--ifm-breadcrumb-separator-size-multiplier))}.breadcrumbs__item--active 
.breadcrumbs__link{background:var(--ifm-breadcrumb-item-background-active);color:var(--ifm-breadcrumb-color-active)}.breadcrumbs__link{border-radius:var(--ifm-breadcrumb-border-radius);font-size:calc(1rem*var(--ifm-breadcrumb-size-multiplier));padding:calc(var(--ifm-breadcrumb-padding-vertical)*var(--ifm-breadcrumb-size-multiplier)) calc(var(--ifm-breadcrumb-padding-horizontal)*var(--ifm-breadcrumb-size-multiplier));transition-duration:var(--ifm-transition-fast);transition-property:background,color}.breadcrumbs__link:any-link:hover,.breadcrumbs__link:link:hover,.breadcrumbs__link:visited:hover,area[href].breadcrumbs__link:hover{background:var(--ifm-breadcrumb-item-background-active);text-decoration:none}.breadcrumbs--sm{--ifm-breadcrumb-size-multiplier:0.8}.breadcrumbs--lg{--ifm-breadcrumb-size-multiplier:1.2}.button{background-color:var(--ifm-button-background-color);border:var(--ifm-button-border-width) solid var(--ifm-button-border-color);border-radius:var(--ifm-button-border-radius);cursor:pointer;font-size:calc(.875rem*var(--ifm-button-size-multiplier));font-weight:var(--ifm-button-font-weight);line-height:1.5;padding:calc(var(--ifm-button-padding-vertical)*var(--ifm-button-size-multiplier)) 
calc(var(--ifm-button-padding-horizontal)*var(--ifm-button-size-multiplier));text-align:center;transition-duration:var(--ifm-button-transition-duration);transition-property:color,background,border-color;-webkit-user-select:none;user-select:none;white-space:nowrap}.button,.button:hover{color:var(--ifm-button-color)}.button--outline{--ifm-button-color:var(--ifm-button-border-color)}.button--outline:hover{--ifm-button-background-color:var(--ifm-button-border-color)}.button--link{--ifm-button-border-color:#0000;color:var(--ifm-link-color);text-decoration:var(--ifm-link-decoration)}.button--link.button--active,.button--link:active,.button--link:hover{color:var(--ifm-link-hover-color);text-decoration:var(--ifm-link-hover-decoration)}.button.disabled,.button:disabled,.button[disabled]{opacity:.65;pointer-events:none}.button--sm{--ifm-button-size-multiplier:0.8}.button--lg{--ifm-button-size-multiplier:1.35}.button--block{display:block;width:100%}.button.button--secondary{color:var(--ifm-color-gray-900)}:where(.button--primary){--ifm-button-background-color:var(--ifm-color-primary);--ifm-button-border-color:var(--ifm-color-primary)}:where(.button--primary):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-primary-dark);--ifm-button-border-color:var(--ifm-color-primary-dark)}.button--primary.button--active,.button--primary:active{--ifm-button-background-color:var(--ifm-color-primary-darker);--ifm-button-border-color:var(--ifm-color-primary-darker)}:where(.button--secondary){--ifm-button-background-color:var(--ifm-color-secondary);--ifm-button-border-color:var(--ifm-color-secondary)}:where(.button--secondary):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-secondary-dark);--ifm-button-border-color:var(--ifm-color-secondary-dark)}.button--secondary.button--active,.button--secondary:active{--ifm-button-background-color:var(--ifm-color-secondary-darker);--ifm-button-border-color:var(--ifm-color-secondary-darker)}:where(.button--su
ccess){--ifm-button-background-color:var(--ifm-color-success);--ifm-button-border-color:var(--ifm-color-success)}:where(.button--success):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-success-dark);--ifm-button-border-color:var(--ifm-color-success-dark)}.button--success.button--active,.button--success:active{--ifm-button-background-color:var(--ifm-color-success-darker);--ifm-button-border-color:var(--ifm-color-success-darker)}:where(.button--info){--ifm-button-background-color:var(--ifm-color-info);--ifm-button-border-color:var(--ifm-color-info)}:where(.button--info):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-info-dark);--ifm-button-border-color:var(--ifm-color-info-dark)}.button--info.button--active,.button--info:active{--ifm-button-background-color:var(--ifm-color-info-darker);--ifm-button-border-color:var(--ifm-color-info-darker)}:where(.button--warning){--ifm-button-background-color:var(--ifm-color-warning);--ifm-button-border-color:var(--ifm-color-warning)}:where(.button--warning):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-warning-dark);--ifm-button-border-color:var(--ifm-color-warning-dark)}.button--warning.button--active,.button--warning:active{--ifm-button-background-color:var(--ifm-color-warning-darker);--ifm-button-border-color:var(--ifm-color-warning-darker)}:where(.button--danger){--ifm-button-background-color:var(--ifm-color-danger);--ifm-button-border-color:var(--ifm-color-danger)}:where(.button--danger):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-danger-dark);--ifm-button-border-color:var(--ifm-color-danger-dark)}.button--danger.button--active,.button--danger:active{--ifm-button-background-color:var(--ifm-color-danger-darker);--ifm-button-border-color:var(--ifm-color-danger-darker)}.button-group{display:inline-flex;gap:var(--ifm-button-group-spacing)}.button-group>.button:not(:first-child){border-bottom-left-radius:0;border-top-left-
radius:0}.button-group>.button:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0}.button-group--block{display:flex;justify-content:stretch}.button-group--block>.button{flex-grow:1}.card{background-color:var(--ifm-card-background-color);border-radius:var(--ifm-card-border-radius);box-shadow:var(--ifm-global-shadow-lw);display:flex;flex-direction:column;overflow:hidden}.card__image{padding-top:var(--ifm-card-vertical-spacing)}.card__image:first-child{padding-top:0}.card__body,.card__footer,.card__header{padding:var(--ifm-card-vertical-spacing) var(--ifm-card-horizontal-spacing)}.card__body:not(:last-child),.card__footer:not(:last-child),.card__header:not(:last-child){padding-bottom:0}.card__body>:last-child,.card__footer>:last-child,.card__header>:last-child{margin-bottom:0}.card__footer{margin-top:auto}.table-of-contents{font-size:.8rem;margin-bottom:0;padding:var(--ifm-toc-padding-vertical) 0}.table-of-contents,.table-of-contents ul{list-style:none;padding-left:var(--ifm-toc-padding-horizontal)}.table-of-contents li{margin:var(--ifm-toc-padding-vertical) var(--ifm-toc-padding-horizontal)}.table-of-contents__left-border{border-left:1px solid var(--ifm-toc-border-color)}.table-of-contents__link{color:var(--ifm-toc-link-color);display:block}.table-of-contents__link--active,.table-of-contents__link--active code,.table-of-contents__link:hover,.table-of-contents__link:hover code{color:var(--ifm-color-primary);text-decoration:none}.close{color:var(--ifm-color-black);float:right;font-size:1.5rem;font-weight:var(--ifm-font-weight-bold);line-height:1;opacity:.5;padding:1rem;transition:opacity var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.close:hover{opacity:.7}.close:focus,.theme-code-block-highlighted-line .codeLineNumber_Tfdd:before{opacity:.8}.dropdown{display:inline-flex;font-weight:var(--ifm-dropdown-font-weight);position:relative;vertical-align:top}.dropdown--hoverable:hover .dropdown__menu,.dropdown--show 
.dropdown__menu{opacity:1;pointer-events:all;transform:translateY(-1px);visibility:visible}#nprogress,.dropdown__menu,.navbar__item.dropdown .navbar__link:not([href]){pointer-events:none}.dropdown--right .dropdown__menu{left:inherit;right:0}.dropdown--nocaret .navbar__link:after{content:none!important}.dropdown__menu{background-color:var(--ifm-dropdown-background-color);border-radius:var(--ifm-global-radius);box-shadow:var(--ifm-global-shadow-md);left:0;list-style:none;max-height:80vh;min-width:10rem;opacity:0;overflow-y:auto;padding:.5rem;position:absolute;top:calc(100% - var(--ifm-navbar-item-padding-vertical) + .3rem);transform:translateY(-.625rem);transition-duration:var(--ifm-transition-fast);transition-property:opacity,transform,visibility;transition-timing-function:var(--ifm-transition-timing-default);visibility:hidden;z-index:var(--ifm-z-index-dropdown)}.menu__caret,.menu__link,.menu__list-item-collapsible{border-radius:.25rem;transition:background var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.dropdown__link{border-radius:.25rem;color:var(--ifm-dropdown-link-color);display:block;font-size:.875rem;margin-top:.2rem;padding:.25rem .5rem;white-space:nowrap}.dropdown__link--active,.dropdown__link:hover{background-color:var(--ifm-dropdown-hover-background-color);color:var(--ifm-dropdown-link-color);text-decoration:none}.dropdown__link--active,.dropdown__link--active:hover{--ifm-dropdown-link-color:var(--ifm-link-color)}.dropdown>.navbar__link:after{border-color:currentcolor #0000;border-style:solid;border-width:.4em .4em 0;content:"";position:relative;transform:translateY(-50%);font-size:.8rem;margin-left:.5em;top:1px}.footer{background-color:var(--ifm-footer-background-color);color:var(--ifm-footer-color);padding:var(--ifm-footer-padding-vertical) 
var(--ifm-footer-padding-horizontal)}.footer--dark{--ifm-footer-background-color:#303846;--ifm-footer-color:var(--ifm-footer-link-color);--ifm-footer-link-color:var(--ifm-color-secondary);--ifm-footer-title-color:var(--ifm-color-white)}.footer__links{margin-bottom:1rem}.footer__link-item{color:var(--ifm-footer-link-color);line-height:2}.footer__link-item:hover{color:var(--ifm-footer-link-hover-color)}.footer__link-separator{margin:0 var(--ifm-footer-link-horizontal-spacing)}.footer__logo{margin-top:1rem;max-width:var(--ifm-footer-logo-max-width)}.footer__title{color:var(--ifm-footer-title-color);font:700 var(--ifm-h4-font-size)/var(--ifm-heading-line-height) var(--ifm-font-family-base);margin-bottom:var(--ifm-heading-margin-bottom)}.menu,.navbar__link{font-weight:var(--ifm-font-weight-semibold)}.docItemContainer_Djhp article>:first-child,.docItemContainer_Djhp header+*,.footer__item{margin-top:0}.admonitionContent_S0QG>:last-child,.collapsibleContent_i85q>:last-child,.footer__items{margin-bottom:0}.codeBlockStandalone_MEMb,[type=checkbox]{padding:0}.hero{align-items:center;background-color:var(--ifm-hero-background-color);color:var(--ifm-hero-text-color);display:flex;padding:4rem 2rem}.hero--primary{--ifm-hero-background-color:var(--ifm-color-primary);--ifm-hero-text-color:var(--ifm-font-color-base-inverse)}.hero--dark{--ifm-hero-background-color:#303846;--ifm-hero-text-color:var(--ifm-color-white)}.hero__title{font-size:3rem}.hero__subtitle,.textContainer_jPR0 h3{font-size:1.5rem}.menu__list{list-style:none;margin:0;padding-left:0}.menu__caret,.menu__link{padding:var(--ifm-menu-link-padding-vertical) var(--ifm-menu-link-padding-horizontal)}.menu__list .menu__list{flex:0 0 100%;margin-top:.25rem;padding-left:var(--ifm-menu-link-padding-horizontal)}.menu__list-item:not(:first-child){margin-top:.25rem}.menu__list-item--collapsed .menu__list{height:0;overflow:hidden}.menu__list-item--collapsed .menu__caret:before,.menu__list-item--collapsed 
.menu__link--sublist:after{transform:rotate(90deg)}.menu__list-item-collapsible{display:flex;flex-wrap:wrap;position:relative}.menu__caret:hover,.menu__link:hover,.menu__list-item-collapsible--active,.menu__list-item-collapsible:hover{background:var(--ifm-menu-color-background-hover)}.menu__list-item-collapsible .menu__link--active,.menu__list-item-collapsible .menu__link:hover{background:none!important}.menu__caret,.menu__link{align-items:center;display:flex}.menu__link{color:var(--ifm-menu-color);flex:1;line-height:1.25}.menu__link:hover{color:var(--ifm-menu-color);text-decoration:none}.menu__caret:before,.menu__link--sublist-caret:after{content:"";filter:var(--ifm-menu-link-sublist-icon-filter);transition:transform var(--ifm-transition-fast) linear}.menu__link--sublist-caret:after{background:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem;margin-left:auto}.menu__link--active,.menu__link--active:hover{color:var(--ifm-menu-color-active)}.navbar__brand,.navbar__link{color:var(--ifm-navbar-link-color)}.menu__link--active:not(.menu__link--sublist){background-color:var(--ifm-menu-color-background-active)}.menu__caret:before{background:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem;height:1.25rem;transform:rotate(180deg);width:1.25rem}.navbar--dark,html[data-theme=dark]{--ifm-menu-link-sublist-icon-filter:invert(100%) sepia(94%) saturate(17%) hue-rotate(223deg) brightness(104%) contrast(98%)}.navbar{background-color:var(--ifm-navbar-background-color);box-shadow:var(--ifm-navbar-shadow);height:var(--ifm-navbar-height);padding:var(--ifm-navbar-padding-vertical) 
var(--ifm-navbar-padding-horizontal)}.navbar,.navbar>.container,.navbar>.container-fluid,.testimonialUserInfo_th5k{display:flex}.navbar--fixed-top{position:sticky;top:0;z-index:var(--ifm-z-index-fixed)}.navbar-sidebar,.navbar-sidebar__backdrop{bottom:0;opacity:0;position:fixed;transition-duration:var(--ifm-transition-fast);transition-timing-function:ease-in-out;top:0;left:0;visibility:hidden}.navbar__inner{display:flex;flex-wrap:wrap;justify-content:space-between;width:100%}.navbar__brand{align-items:center;display:flex;min-width:0}.navbar__brand:hover{color:var(--ifm-navbar-link-hover-color);text-decoration:none}.announcementBarContent_xLdY,.navbar__title{flex:1 1 auto}.navbar__toggle{display:none;margin-right:.5rem}.navbar__logo{flex:0 0 auto;margin-right:.5rem}.navbar__items{align-items:center;display:flex;flex:1;min-width:0}.navbar__items--center{flex:0 0 auto}.navbar__items--center .navbar__brand{margin:0}.navbar__items--center+.navbar__items--right{flex:1}.navbar__items--right{flex:0 0 auto;justify-content:flex-end}.navbar__items--right>:last-child{padding-right:0}.navbar__item{display:inline-block;padding:var(--ifm-navbar-item-padding-vertical) 
var(--ifm-navbar-item-padding-horizontal)}.navbar__link--active,.navbar__link:hover{color:var(--ifm-navbar-link-hover-color);text-decoration:none}.navbar--dark,.navbar--primary{--ifm-menu-color:var(--ifm-color-gray-300);--ifm-navbar-link-color:var(--ifm-color-gray-100);--ifm-navbar-search-input-background-color:#ffffff1a;--ifm-navbar-search-input-placeholder-color:#ffffff80;color:var(--ifm-color-white)}.navbar--dark{--ifm-navbar-background-color:#242526;--ifm-menu-color-background-active:#ffffff0d;--ifm-navbar-search-input-color:var(--ifm-color-white)}.navbar--primary{--ifm-navbar-background-color:var(--ifm-color-primary);--ifm-navbar-link-hover-color:var(--ifm-color-white);--ifm-menu-color-active:var(--ifm-color-white);--ifm-navbar-search-input-color:var(--ifm-color-emphasis-500)}.navbar__search-input{-webkit-appearance:none;appearance:none;background:var(--ifm-navbar-search-input-background-color) var(--ifm-navbar-search-input-icon) no-repeat .75rem center/1rem 1rem;border:none;border-radius:2rem;color:var(--ifm-navbar-search-input-color);cursor:text;display:inline-block;font-size:.9rem;height:2rem;padding:0 .5rem 0 2.25rem;width:12.5rem}.navbar__search-input::placeholder{color:var(--ifm-navbar-search-input-placeholder-color)}.navbar-sidebar{background-color:var(--ifm-navbar-background-color);box-shadow:var(--ifm-global-shadow-md);transform:translate3d(-100%,0,0);transition-property:opacity,visibility,transform;width:var(--ifm-navbar-sidebar-width)}.navbar-sidebar--show .navbar-sidebar,.navbar-sidebar__items{transform:translateZ(0)}.navbar-sidebar--show .navbar-sidebar,.navbar-sidebar--show .navbar-sidebar__backdrop{opacity:1;visibility:visible}.navbar-sidebar__backdrop{background-color:#0009;right:0;transition-property:opacity,visibility}.navbar-sidebar__brand{align-items:center;box-shadow:var(--ifm-navbar-shadow);display:flex;flex:1;height:var(--ifm-navbar-height);padding:var(--ifm-navbar-padding-vertical) 
var(--ifm-navbar-padding-horizontal)}.navbar-sidebar__items{display:flex;height:calc(100% - var(--ifm-navbar-height));transition:transform var(--ifm-transition-fast) ease-in-out}.navbar-sidebar__items--show-secondary{transform:translate3d(calc((var(--ifm-navbar-sidebar-width))*-1),0,0)}.navbar-sidebar__item{flex-shrink:0;padding:.5rem;width:calc(var(--ifm-navbar-sidebar-width))}.navbar-sidebar__back{background:var(--ifm-menu-color-background-active);font-size:15px;font-weight:var(--ifm-button-font-weight);margin:0 0 .2rem -.5rem;padding:.6rem 1.5rem;position:relative;text-align:left;top:-.5rem;width:calc(100% + 1rem)}.navbar-sidebar__close{display:flex;margin-left:auto}.pagination{column-gap:var(--ifm-pagination-page-spacing);display:flex;font-size:var(--ifm-pagination-font-size);padding-left:0}.pagination--sm{--ifm-pagination-font-size:0.8rem;--ifm-pagination-padding-horizontal:0.8rem;--ifm-pagination-padding-vertical:0.2rem}.pagination--lg{--ifm-pagination-font-size:1.2rem;--ifm-pagination-padding-horizontal:1.2rem;--ifm-pagination-padding-vertical:0.3rem}.pagination__item{display:inline-flex}.pagination__item>span{padding:var(--ifm-pagination-padding-vertical)}.pagination__item--active .pagination__link{color:var(--ifm-pagination-color-active)}.pagination__item--active .pagination__link,.pagination__item:not(.pagination__item--active):hover .pagination__link{background:var(--ifm-pagination-item-active-background)}.pagination__item--disabled,.pagination__item[disabled]{opacity:.25;pointer-events:none}.pagination__link{border-radius:var(--ifm-pagination-border-radius);color:var(--ifm-font-color-base);display:inline-block;padding:var(--ifm-pagination-padding-vertical) var(--ifm-pagination-padding-horizontal);transition:background var(--ifm-transition-fast) 
var(--ifm-transition-timing-default)}.pagination__link:hover{text-decoration:none}.pagination-nav{grid-gap:var(--ifm-spacing-horizontal);display:grid;gap:var(--ifm-spacing-horizontal);grid-template-columns:repeat(2,1fr)}.pagination-nav__link{border:1px solid var(--ifm-color-emphasis-300);border-radius:var(--ifm-pagination-nav-border-radius);display:block;height:100%;line-height:var(--ifm-heading-line-height);padding:var(--ifm-global-spacing);transition:border-color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.pagination-nav__link:hover{border-color:var(--ifm-pagination-nav-color-hover);text-decoration:none}.pagination-nav__link--next{grid-column:2/3;text-align:right}.pagination-nav__label{font-size:var(--ifm-h4-font-size);font-weight:var(--ifm-heading-font-weight);word-break:break-word}.pagination-nav__link--prev .pagination-nav__label:before{content:"« "}.pagination-nav__link--next .pagination-nav__label:after{content:" »"}.pagination-nav__sublabel{color:var(--ifm-color-content-secondary);font-size:var(--ifm-h5-font-size);font-weight:var(--ifm-font-weight-semibold);margin-bottom:.25rem}.pills__item,.tabs{font-weight:var(--ifm-font-weight-bold)}.pills{display:flex;gap:var(--ifm-pills-spacing);padding-left:0}.pills__item{border-radius:.5rem;cursor:pointer;display:inline-block;padding:.25rem 1rem;transition:background var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.footer.footer--dark .footer__col:first-child .footer__item:first-child,.tabs,:not(.containsTaskList_mC6p>li)>.containsTaskList_mC6p{padding-left:0}.pills__item--active{color:var(--ifm-pills-color-active)}.pills__item--active,.pills__item:not(.pills__item--active):hover{background:var(--ifm-pills-color-background-active)}.pills--block{justify-content:stretch}.pills--block .pills__item{flex-grow:1;text-align:center}.tabs{color:var(--ifm-tabs-color);display:flex;margin-bottom:0;overflow-x:auto}.tabs__item{border-bottom:3px solid 
#0000;border-radius:var(--ifm-global-radius);cursor:pointer;display:inline-flex;padding:var(--ifm-tabs-padding-vertical) var(--ifm-tabs-padding-horizontal);transition:background-color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.fastkafka-home-mobile,html.plugin-pages .navbar.navbar-sidebar--show .navbar-sidebar .navbar-sidebar__brand>div,html.plugin-pages .navbar__items.navbar__items--right>a+div>button{display:none}.tabs__item--active{border-bottom-color:var(--ifm-tabs-color-active-border);border-bottom-left-radius:0;border-bottom-right-radius:0;color:var(--ifm-tabs-color-active)}.tabs__item:hover{background-color:var(--ifm-hover-overlay)}.tabs--block{justify-content:stretch}.tabs--block .tabs__item{flex-grow:1;justify-content:center}html[data-theme=dark]{--ifm-color-scheme:dark;--ifm-color-emphasis-0:var(--ifm-color-gray-1000);--ifm-color-emphasis-100:var(--ifm-color-gray-900);--ifm-color-emphasis-200:var(--ifm-color-gray-800);--ifm-color-emphasis-300:var(--ifm-color-gray-700);--ifm-color-emphasis-400:var(--ifm-color-gray-600);--ifm-color-emphasis-600:var(--ifm-color-gray-400);--ifm-color-emphasis-700:var(--ifm-color-gray-300);--ifm-color-emphasis-800:var(--ifm-color-gray-200);--ifm-color-emphasis-900:var(--ifm-color-gray-100);--ifm-color-emphasis-1000:var(--ifm-color-gray-0);--ifm-background-color:#1b1b1d;--ifm-background-surface-color:#242526;--ifm-hover-overlay:#ffffff0d;--ifm-color-content:#e3e3e3;--ifm-color-content-secondary:#fff;--ifm-breadcrumb-separator-filter:invert(64%) sepia(11%) saturate(0%) hue-rotate(149deg) brightness(99%) 
contrast(95%);--ifm-code-background:#ffffff1a;--ifm-scrollbar-track-background-color:#444;--ifm-scrollbar-thumb-background-color:#686868;--ifm-scrollbar-thumb-hover-background-color:#7a7a7a;--ifm-table-stripe-background:#ffffff12;--ifm-toc-border-color:var(--ifm-color-emphasis-200);--ifm-color-primary-contrast-background:#102445;--ifm-color-primary-contrast-foreground:#ebf2fc;--ifm-color-secondary-contrast-background:#474748;--ifm-color-secondary-contrast-foreground:#fdfdfe;--ifm-color-success-contrast-background:#003100;--ifm-color-success-contrast-foreground:#e6f6e6;--ifm-color-info-contrast-background:#193c47;--ifm-color-info-contrast-foreground:#eef9fd;--ifm-color-warning-contrast-background:#4d3800;--ifm-color-warning-contrast-foreground:#fff8e6;--ifm-color-danger-contrast-background:#4b1113;--ifm-color-danger-contrast-foreground:#ffebec;--docsearch-text-color:#f5f6f7;--docsearch-container-background:#090a11cc;--docsearch-modal-background:#15172a;--docsearch-modal-shadow:inset 1px 1px 0 0 #2c2e40,0 3px 8px 0 #000309;--docsearch-searchbox-background:#090a11;--docsearch-searchbox-focus-background:#000;--docsearch-hit-color:#bec3c9;--docsearch-hit-shadow:none;--docsearch-hit-background:#090a11;--docsearch-key-gradient:linear-gradient(-26.5deg,#565872,#31355b);--docsearch-key-shadow:inset 0 -2px 0 0 #282d55,inset 0 0 1px 1px #51577d,0 2px 2px 0 #0304094d;--docsearch-footer-background:#1e2136;--docsearch-footer-shadow:inset 0 1px 0 0 #494c6a80,0 -4px 8px 0 
#0003;--docsearch-logo-color:#fff;--docsearch-muted-color:#7f8497}:root{--docusaurus-progress-bar-color:var(--ifm-color-primary);--ifm-font-family-monospace:"RobotoMono-Regular";--ifm-font-family-base:"Roboto-Regular";--ifm-heading-font-family:"Rubik-Medium";--ifm-code-font-size:95%;--docusaurus-highlighted-code-line-bg:#0000001a;--ifm-navbar-background-color:#003257;--ifm-dropdown-background-color:#003257;--ifm-navbar-height:4.69rem;font-family:Roboto-Regular;--docusaurus-announcement-bar-height:auto;--docusaurus-tag-list-border:var(--ifm-color-emphasis-300);--docusaurus-collapse-button-bg:#0000;--docusaurus-collapse-button-bg-hover:#0000001a;--doc-sidebar-width:300px;--doc-sidebar-hidden-width:30px;--docsearch-primary-color:#5468ff;--docsearch-text-color:#1c1e21;--docsearch-spacing:12px;--docsearch-icon-stroke-width:1.4;--docsearch-highlight-color:var(--docsearch-primary-color);--docsearch-muted-color:#969faf;--docsearch-container-background:#656c85cc;--docsearch-logo-color:#5468ff;--docsearch-modal-width:560px;--docsearch-modal-height:600px;--docsearch-modal-background:#f5f6f7;--docsearch-modal-shadow:inset 1px 1px 0 0 #ffffff80,0 3px 8px 0 #555a64;--docsearch-searchbox-height:56px;--docsearch-searchbox-background:#ebedf0;--docsearch-searchbox-focus-background:#fff;--docsearch-searchbox-shadow:inset 0 0 0 2px var(--docsearch-primary-color);--docsearch-hit-height:56px;--docsearch-hit-color:#444950;--docsearch-hit-active-color:#fff;--docsearch-hit-background:#fff;--docsearch-hit-shadow:0 1px 3px 0 #d4d9e1;--docsearch-key-gradient:linear-gradient(-225deg,#d5dbe4,#f8f8f8);--docsearch-key-shadow:inset 0 -2px 0 0 #cdcde6,inset 0 0 1px 1px #fff,0 1px 2px 1px #1e235a66;--docsearch-footer-height:44px;--docsearch-footer-background:#fff;--docsearch-footer-shadow:0 -1px 0 0 #e0e3e8,0 -3px 6px 0 #45629b1f;--docsearch-primary-color:var(--ifm-color-primary);--docsearch-text-color:var(--ifm-font-color-base)}.navbar__title,nav .navbar__inner .navbar__items 
.fastkafka-home>div>p{font-family:var(--ifm-heading-font-family);font-weight:100;line-height:var(--ifm-heading-line-height)}#nprogress .bar{background:var(--docusaurus-progress-bar-color);height:2px;left:0;position:fixed;top:0;width:100%;z-index:1031}#nprogress .peg{box-shadow:0 0 10px var(--docusaurus-progress-bar-color),0 0 5px var(--docusaurus-progress-bar-color);height:100%;opacity:1;position:absolute;right:0;transform:rotate(3deg) translateY(-4px);width:100px}@font-face{font-family:Rubik-Medium;src:url(/assets/fonts/Rubik-Medium-115acab02aed19275f712214686d778e.ttf) format("truetype")}@font-face{font-family:RobotoMono-Regular;src:url(/assets/fonts/RobotoMono-Regular-34e46962590bff8eefe5f14af6ea24e3.ttf) format("truetype")}@font-face{font-family:Roboto-Regular;src:url(/assets/fonts/Roboto-Regular-fc2b5060f7accec5cf74437196c1b027.ttf) format("truetype")}[data-theme=dark]{--docusaurus-highlighted-code-line-bg:#0000004d}html[data-theme=dark] .DocSearch-Button{background:#ebedf0;color:#969faf}html[data-theme=dark] .DocSearch-Button:hover{background:#fff;box-shadow:inset 0 0 0 2px var(--docsearch-primary-color);color:#1c1e21}html[data-theme=dark] .DocSearch-Button .DocSearch-Search-Icon{color:#1c1e21}html[data-theme=dark] .DocSearch-Button .DocSearch-Button-Key{background:linear-gradient(-225deg,#d5dbe4,#f8f8f8);box-shadow:inset 0 -2px 0 0 #cdcde6,inset 0 0 1px 1px #fff,0 1px 2px 1px #1e235a66;color:#969faf}.navbar-sidebar .navbar-sidebar__brand div>button,.navbar-sidebar .navbar-sidebar__items .navbar-sidebar__item>button,.navbar-sidebar .navbar-sidebar__items .navbar-sidebar__item>ul>li a,.navbar.navbar--fixed-top .navbar__items>a.fastkafka-home-mobile+div>button{color:#fff}.navbar-sidebar .navbar-sidebar__brand div>button:hover,.navbar.navbar--fixed-top .navbar__items>a.fastkafka-home-mobile+div>button:hover{background:#8c9fae}.navbar-sidebar .navbar-sidebar__brand div>button:hover,[data-theme=dark] .navbar.navbar--fixed-top 
.navbar__items>a.fastkafka-home-mobile+div>button:hover{background:#444950}.menu__list-item--collapsed .menu__link--sublist-caret:after{transform:rotate(0)}.menu__link--sublist-caret:after,.menu__list-item--collapsed .menu__link--sublist-caret:after{background:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCA0OCA0OCIgc3R5bGU9ImVuYWJsZS1iYWNrZ3JvdW5kOm5ldyAwIDAgNDggNDgiIHhtbDpzcGFjZT0icHJlc2VydmUiPjxwYXRoIGQ9Ik0xNS4yIDQzLjkgMTIuNCA0MWwxNy4yLTE3LjFMMTIuNCA2LjdsMi44LTIuOCAyMCAyMC0yMCAyMHoiIHN0eWxlPSJmaWxsOiM1NmI3ZTEiLz48L3N2Zz4=) 50%/2rem 2rem;filter:none;height:1rem;min-width:1rem;width:1rem}.details_lb9f[data-collapsed=false].isBrowser_bmU9>summary:before,.details_lb9f[open]:not(.isBrowser_bmU9)>summary:before,.menu__link--sublist-caret:after{transform:rotate(90deg)}.menu__link--active:not(.menu__link--sublist),.navbar-sidebar__back{background:#ffffff0d}.footer-discord-link:before,.footer-facebook-link:before,.footer-github-link:before,.footer-linkedin-link:before,.footer-twitter-link:before,.header-discord-link:before{background-color:#8c9fae;height:24px;width:24px;content:""}.navbar.navbar-sidebar--show .navbar-sidebar .navbar-sidebar__brand>div{margin-left:auto;margin-right:1rem!important}.navbar.navbar-sidebar--show .navbar-sidebar .navbar-sidebar__brand .navbar-sidebar__close{margin-left:unset}.browserWindowMenuIcon_Vhuh,html.plugin-pages .navbar.navbar-sidebar--show .navbar-sidebar .navbar-sidebar__brand .navbar-sidebar__close{margin-left:auto}.navbar--fixed-top{padding-bottom:0;padding-top:0}.navbar__title{color:#fff;font-size:3.5rem;margin-left:-.4rem}.navbar__brand:hover{color:var(--ifm-navbar-link-color)}.navbar__items.navbar__items--right .navbar__item.dropdown .dropdown__link,.navbar__items.navbar__items--right .navbar__link,.navbar__toggle{color:#fff}.navbar__items.navbar__items--right .navbar__item.dropdown .dropdown__link:hover{color:var(--ifm-link-hover-color)}.navbar__items.navbar__items--right 
.navbar__item.dropdown,.navbar__items.navbar__items--right .navbar__item.navbar__link{border-right:1px solid #214c6c;padding:23px 18px}.navbar__items.navbar__items--right .navbar__item.navbar__link.header-discord-link{border-right:none}.navbar__logo{height:auto;margin-top:1.2rem;width:2.156rem}.navbar__brand{margin-left:0;margin-right:0}html.docs-doc-page main article .markdown>h2.anchor{font-weight:400}html.docs-doc-page main article .markdown>h3.anchor{font-family:var(--ifm-font-family-monospace);font-weight:500}html.docs-doc-page main article .markdown>h3.anchor>strong,html.docs-doc-page main ul.table-of-contents>li>ul a>strong{font-weight:500}html.docs-doc-page main article .markdown table{font-size:.8rem;text-align:left}html.docs-doc-page main article .markdown table code{border:1px solid #0000001a}nav .navbar__inner .navbar__items .fastkafka-home>div{margin-top:1px}nav .navbar__inner .navbar__items .fastkafka-home>div>p{color:#fff;display:inline-block;font-size:1.2rem;margin:0}nav .navbar__inner .navbar__items .fastkafka-home>div>img{height:auto;margin-right:5px;width:15px}.navbar.navbar--fixed-top .navbar__items--right{justify-content:end}.navbar__link,.testimonialDescription_MWAM,.textContainer_jPR0 
p,h3.anchor>code{font-size:1rem}.footer-discord-link:before,.header-discord-link:before{display:flex;-webkit-mask-image:url(data:image/svg+xml;base64,PHN2ZyB2ZXJzaW9uPSIxLjEiIGlkPSJHcmFwaGljX0VsZW1lbnRzIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHg9IjAiIHk9IjAiIHZpZXdCb3g9IjAgMCAxMzUgMTM1IiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCAxMzUgMTM1IiB4bWw6c3BhY2U9InByZXNlcnZlIj48c3R5bGU+LnN0MHtmaWxsOiNmZmZ9PC9zdHlsZT48cGF0aCBjbGFzcz0ic3QwIiBkPSJNNjcuNSAwQzMwLjIgMCAwIDMwLjIgMCA2Ny41UzMwLjIgMTM1IDY3LjUgMTM1IDEzNSAxMDQuOCAxMzUgNjcuNSAxMDQuOCAwIDY3LjUgMHptNDMuNCA4OS43Yy02LjYgNS0xNC4xIDguNy0yMiAxMS4xLTEuOC0yLjQtMy40LTUtNC43LTcuNiAyLjYtMSA1LjEtMi4yIDcuNC0zLjYtLjYtLjUtMS4yLS45LTEuOC0xLjQtMTQgNi42LTMwLjEgNi42LTQ0LjEgMC0uNi41LTEuMi45LTEuOCAxLjQgMi40IDEuNCA0LjkgMi42IDcuNCAzLjUtMS40IDIuNy0yLjkgNS4yLTQuNyA3LjctNy45LTIuNC0xNS40LTYuMi0yMi0xMS4xQzIzIDczLjUgMjYuMSA1NyAzNy40IDQwLjFjNS43LTIuNiAxMS44LTQuNSAxOC01LjYuOSAxLjUgMS43IDMuMSAyLjQgNC43IDYuNi0xIDEzLjMtMSAxOS45IDAgLjYtMS41IDEuNS0zLjMgMi4zLTQuNyA2LjIgMS4xIDEyLjIgMi45IDE4IDUuNSA5LjggMTQuNiAxNC43IDMwLjkgMTIuOSA0OS43eiIvPjxwYXRoIGNsYXNzPSJzdDAiIGQ9Ik01My4yIDYyLjJjLTQuNCAwLTcuOCA0LTcuOCA4LjhzMy41IDguOCA3LjggOC44YzQuNCAwIDcuOC00IDcuOC04LjguMS00LjktMy40LTguOC03LjgtOC44ek04Mi4xIDYyLjJjLTQuNCAwLTcuOCA0LTcuOCA4LjhzMy41IDguOCA3LjggOC44YzQuNCAwIDcuOC00IDcuOC04LjguMS00LjktMy40LTguOC03LjgtOC44eiIvPjwvc3ZnPg==);mask-image:url(data:image/svg+xml;base64,PHN2ZyB2ZXJzaW9uPSIxLjEiIGlkPSJHcmFwaGljX0VsZW1lbnRzIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHg9IjAiIHk9IjAiIHZpZXdCb3g9IjAgMCAxMzUgMTM1IiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCAxMzUgMTM1IiB4bWw6c3BhY2U9InByZXNlcnZlIj48c3R5bGU+LnN0MHtmaWxsOiNmZmZ9PC9zdHlsZT48cGF0aCBjbGFzcz0ic3QwIiBkPSJNNjcuNSAwQzMwLjIgMCAwIDMwLjIgMCA2Ny41UzMwLjIgMTM1IDY3LjUgMTM1IDEzNSAxMDQuOCAxMzUgNjcuNSAxMDQuOCAwIDY3LjUgMHptNDMuNCA4OS43Yy02LjYgNS0xNC4xIDguNy0yMiAxMS4xLTEuOC0yLjQtMy40LTUtNC43LTcuNiAyLjYtMSA1LjEtMi4yIDcuNC0zLjYtLjYtLjUtMS4yLS45LTEuOC0xLjQtMTQgNi42LTMwLjEgNi42LTQ0LjEgMC0uNi41LTEuMi45LTEuOCAxLjQgMi40IDE
uNCA0LjkgMi42IDcuNCAzLjUtMS40IDIuNy0yLjkgNS4yLTQuNyA3LjctNy45LTIuNC0xNS40LTYuMi0yMi0xMS4xQzIzIDczLjUgMjYuMSA1NyAzNy40IDQwLjFjNS43LTIuNiAxMS44LTQuNSAxOC01LjYuOSAxLjUgMS43IDMuMSAyLjQgNC43IDYuNi0xIDEzLjMtMSAxOS45IDAgLjYtMS41IDEuNS0zLjMgMi4zLTQuNyA2LjIgMS4xIDEyLjIgMi45IDE4IDUuNSA5LjggMTQuNiAxNC43IDMwLjkgMTIuOSA0OS43eiIvPjxwYXRoIGNsYXNzPSJzdDAiIGQ9Ik01My4yIDYyLjJjLTQuNCAwLTcuOCA0LTcuOCA4LjhzMy41IDguOCA3LjggOC44YzQuNCAwIDcuOC00IDcuOC04LjguMS00LjktMy40LTguOC03LjgtOC44ek04Mi4xIDYyLjJjLTQuNCAwLTcuOCA0LTcuOCA4LjhzMy41IDguOCA3LjggOC44YzQuNCAwIDcuOC00IDcuOC04LjguMS00LjktMy40LTguOC03LjgtOC44eiIvPjwvc3ZnPg==)}.footer-discord-link:hover,.footer-facebook-link:hover,.footer-github-link:hover,.footer-linkedin-link:hover,.footer-twitter-link:hover,.header-discord-link:hover{opacity:.6}.footer-github-link:before{display:flex;-webkit-mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMzUgMTMzIiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCAxMzUgMTMzIiB4bWw6c3BhY2U9InByZXNlcnZlIj48cGF0aCBkPSJNMTM1IDY3LjVDMTM1IDMwLjIgMTA0LjggMCA2Ny41IDBTMCAzMC4yIDAgNjcuNWMwIDMxLjYgMjEuNyA1OC4xIDUxIDY1LjUgMC0uMi4xLS40LjEtLjYgMC0xLjUtLjEtNi44LS4xLTEyLjMtMTguNCA0LTIyLjItNy45LTIyLjItNy45LTMtNy43LTcuMy05LjctNy4zLTkuNy02LTQuMS40LTQuMS40LTQuMSA2LjcuNCAxMC4yIDYuOCAxMC4yIDYuOCA1LjkgMTAuMSAxNS40IDcuMyAxOS4yIDUuNS41LTQuMyAyLjMtNy4zIDQuMi04LjktMTQuNi0xLjUtMzAtNy4zLTMwLTMyLjggMC03LjMgMi42LTEzLjIgNi44LTE3LjgtLjctMS43LTMtOC41LjctMTcuNiAwIDAgNS42LTEuOCAxOC4xIDYuOCA1LjQtMS41IDEwLjktMi4yIDE2LjUtMi4yczExLjMuOCAxNi41IDIuMmMxMi42LTguNiAxOC4xLTYuOCAxOC4xLTYuOCAzLjYgOS4xIDEuMyAxNiAuNyAxNy42IDQuMyA0LjYgNi44IDEwLjYgNi44IDE3LjggMCAyNS41LTE1LjQgMzEuMS0zMC4yIDMyLjggMi40IDIuMSA0LjUgNi4xIDQuNSAxMi4zIDAgOC45LS4xIDE2LjEtLjEgMTguMyAwIC4yIDAgLjQuMS43IDI5LjItNy40IDUxLTMzLjkgNTEtNjUuNnoiIHN0eWxlPSJmaWxsLXJ1bGU6ZXZlbm9kZDtjbGlwLXJ1bGU6ZXZlbm9kZDtmaWxsOiNmZmYiLz48L3N2Zz4=);mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCA
xMzUgMTMzIiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCAxMzUgMTMzIiB4bWw6c3BhY2U9InByZXNlcnZlIj48cGF0aCBkPSJNMTM1IDY3LjVDMTM1IDMwLjIgMTA0LjggMCA2Ny41IDBTMCAzMC4yIDAgNjcuNWMwIDMxLjYgMjEuNyA1OC4xIDUxIDY1LjUgMC0uMi4xLS40LjEtLjYgMC0xLjUtLjEtNi44LS4xLTEyLjMtMTguNCA0LTIyLjItNy45LTIyLjItNy45LTMtNy43LTcuMy05LjctNy4zLTkuNy02LTQuMS40LTQuMS40LTQuMSA2LjcuNCAxMC4yIDYuOCAxMC4yIDYuOCA1LjkgMTAuMSAxNS40IDcuMyAxOS4yIDUuNS41LTQuMyAyLjMtNy4zIDQuMi04LjktMTQuNi0xLjUtMzAtNy4zLTMwLTMyLjggMC03LjMgMi42LTEzLjIgNi44LTE3LjgtLjctMS43LTMtOC41LjctMTcuNiAwIDAgNS42LTEuOCAxOC4xIDYuOCA1LjQtMS41IDEwLjktMi4yIDE2LjUtMi4yczExLjMuOCAxNi41IDIuMmMxMi42LTguNiAxOC4xLTYuOCAxOC4xLTYuOCAzLjYgOS4xIDEuMyAxNiAuNyAxNy42IDQuMyA0LjYgNi44IDEwLjYgNi44IDE3LjggMCAyNS41LTE1LjQgMzEuMS0zMC4yIDMyLjggMi40IDIuMSA0LjUgNi4xIDQuNSAxMi4zIDAgOC45LS4xIDE2LjEtLjEgMTguMyAwIC4yIDAgLjQuMS43IDI5LjItNy40IDUxLTMzLjkgNTEtNjUuNnoiIHN0eWxlPSJmaWxsLXJ1bGU6ZXZlbm9kZDtjbGlwLXJ1bGU6ZXZlbm9kZDtmaWxsOiNmZmYiLz48L3N2Zz4=)}.footer-facebook-link:before{display:flex;-webkit-mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMzUgMTM0LjkiIHN0eWxlPSJlbmFibGUtYmFja2dyb3VuZDpuZXcgMCAwIDEzNSAxMzQuOSIgeG1sOnNwYWNlPSJwcmVzZXJ2ZSI+PHBhdGggZD0iTTEzNSA2Ny41QzEzNSAzMC4yIDEwNC44IDAgNjcuNSAwUzAgMzAuMiAwIDY3LjVjMCAzMi4zIDIyLjcgNTkuMiA1Mi45IDY1LjlWOTNIMzcuNlY3NS42aDE1LjNWNjIuM2MwLTE1LjEgOS0yMy40IDIyLjgtMjMuNCA2LjYgMCAxMy41IDEuMiAxMy41IDEuMnYxNC44aC03LjZjLTcuNSAwLTkuOCA0LjYtOS44IDkuNHYxMS4zaDE2LjdMODUuOCA5M2gtMTR2NDEuOWMzNS4yLTIuMiA2My4yLTMxLjYgNjMuMi02Ny40eiIgc3R5bGU9ImZpbGw6I2ZmZiIvPjwvc3ZnPg==);mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMzUgMTM0LjkiIHN0eWxlPSJlbmFibGUtYmFja2dyb3VuZDpuZXcgMCAwIDEzNSAxMzQuOSIgeG1sOnNwYWNlPSJwcmVzZXJ2ZSI+PHBhdGggZD0iTTEzNSA2Ny41QzEzNSAzMC4yIDEwNC44IDAgNjcuNSAwUzAgMzAuMiAwIDY3LjVjMCAzMi4zIDIyLjcgNTkuMiA1Mi45IDY1LjlWOTNIMzcuNlY3NS42aDE1LjNWNjIuM2MwLTE1LjEgOS0yMy40IDIyLjgtMjMuNCA2LjYgMCAxMy41IDEuMiAxMy41IDEuMnY
xNC44aC03LjZjLTcuNSAwLTkuOCA0LjYtOS44IDkuNHYxMS4zaDE2LjdMODUuOCA5M2gtMTR2NDEuOWMzNS4yLTIuMiA2My4yLTMxLjYgNjMuMi02Ny40eiIgc3R5bGU9ImZpbGw6I2ZmZiIvPjwvc3ZnPg==)}.footer-twitter-link:before{display:flex;-webkit-mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMzUgMTM1IiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCAxMzUgMTM1IiB4bWw6c3BhY2U9InByZXNlcnZlIj48cGF0aCBkPSJNNjcuNSAwQzMwLjIgMCAwIDMwLjIgMCA2Ny41UzMwLjIgMTM1IDY3LjUgMTM1IDEzNSAxMDQuOCAxMzUgNjcuNSAxMDQuOCAwIDY3LjUgMHptMzYuMiA1My43djIuMmMwIDIyLjktMTcuNCA0OS4zLTQ5LjMgNDkuMy05LjggMC0xOC45LTIuOS0yNi42LTcuOCAxLjQuMiAyLjcuMiA0LjEuMiA4LjEgMCAxNS42LTIuOCAyMS41LTcuNC03LjYtLjEtMTQtNS4yLTE2LjItMTIgMS4xLjIgMi4xLjMgMy4zLjMgMS42IDAgMy4xLS4yIDQuNi0uNi03LjktMS42LTEzLjktOC42LTEzLjktMTd2LS4yYzIuMyAxLjMgNSAyLjEgNy44IDIuMi00LjctMy4xLTcuNy04LjQtNy43LTE0LjQgMC0zLjIuOS02LjEgMi4zLTguNyA4LjcgMTAuNCAyMS40IDE3LjMgMzUuOCAxOC0uMy0xLjMtLjQtMi42LS40LTQgMC05LjYgNy44LTE3LjMgMTcuMy0xNy4zIDUgMCA5LjUgMi4xIDEyLjYgNS41IDMuOS0uOCA3LjctMi4yIDExLTQuMi0xLjMgNC00IDcuNC03LjYgOS42IDMuNS0uNCA2LjgtMS4zIDEwLTIuNy0yLjMgMy41LTUuMyA2LjUtOC42IDl6IiBzdHlsZT0iZmlsbDojZmZmIi8+PC9zdmc+);mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMzUgMTM1IiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCAxMzUgMTM1IiB4bWw6c3BhY2U9InByZXNlcnZlIj48cGF0aCBkPSJNNjcuNSAwQzMwLjIgMCAwIDMwLjIgMCA2Ny41UzMwLjIgMTM1IDY3LjUgMTM1IDEzNSAxMDQuOCAxMzUgNjcuNSAxMDQuOCAwIDY3LjUgMHptMzYuMiA1My43djIuMmMwIDIyLjktMTcuNCA0OS4zLTQ5LjMgNDkuMy05LjggMC0xOC45LTIuOS0yNi42LTcuOCAxLjQuMiAyLjcuMiA0LjEuMiA4LjEgMCAxNS42LTIuOCAyMS41LTcuNC03LjYtLjEtMTQtNS4yLTE2LjItMTIgMS4xLjIgMi4xLjMgMy4zLjMgMS42IDAgMy4xLS4yIDQuNi0uNi03LjktMS42LTEzLjktOC42LTEzLjktMTd2LS4yYzIuMyAxLjMgNSAyLjEgNy44IDIuMi00LjctMy4xLTcuNy04LjQtNy43LTE0LjQgMC0zLjIuOS02LjEgMi4zLTguNyA4LjcgMTAuNCAyMS40IDE3LjMgMzUuOCAxOC0uMy0xLjMtLjQtMi42LS40LTQgMC05LjYgNy44LTE3LjMgMTcuMy0xNy4zIDUgMCA5LjUgMi4xIDEyLjYgNS41IDMuOS0uOCA3LjctMi4yIDExLTQuMi0xLjMg
NC00IDcuNC03LjYgOS42IDMuNS0uNCA2LjgtMS4zIDEwLTIuNy0yLjMgMy41LTUuMyA2LjUtOC42IDl6IiBzdHlsZT0iZmlsbDojZmZmIi8+PC9zdmc+)}.footer-linkedin-link:before{display:flex;-webkit-mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMzUgMTM1IiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCAxMzUgMTM1IiB4bWw6c3BhY2U9InByZXNlcnZlIj48cGF0aCBkPSJNNjcuNSAwQzMwLjIgMCAwIDMwLjIgMCA2Ny41UzMwLjIgMTM1IDY3LjUgMTM1IDEzNSAxMDQuOCAxMzUgNjcuNSAxMDQuOCAwIDY3LjUgMHpNNDUuMiAxMDQuNGMtNC44LS4xLTkuNiAwLTE0LjQgMC0uOCAwLTEtLjItMS0xVjUyLjFjMC0uNy4yLTEgLjktMWgxNC42Yy45IDAgMS4xLjQgMS4xIDEuMnY1MWMwIC44LS4yIDEuMS0xLjIgMS4xem0tNy4xLTYwLjZjLTUuMiAwLTkuNi00LjMtOS42LTkuNSAwLTUuMyA0LjMtOS42IDkuNi05LjYgNS4yIDAgOS42IDQuMyA5LjYgOS41cy00LjMgOS42LTkuNiA5LjZ6bTcwLjQgNTkuNWMwIC45LS4yIDEuMS0xLjEgMS4xSDkzYy0uOCAwLTEtLjMtMS0xLjFWNzYuN2MwLTIuMi0uMS00LjQtLjgtNi42LTEuMS00LTQtNi04LjItNS44LTUuNy4zLTguNyAzLjEtOS40IDguOS0uMiAxLjQtLjMgMi44LS4zIDQuMnYyNS45YzAgLjktLjIgMS4xLTEuMSAxLjFINTcuN2MtLjggMC0xLS4yLTEtMVY1Mi4xYzAtLjguMy0xIDEuMS0xaDEzLjhjLjggMCAxLjEuMyAxIDEuMXY2LjFjMS4xLTEuMiAyLTIuNSAzLjItMy42IDMuNS0zLjMgNy42LTUgMTIuNS00LjkgMi43IDAgNS4zLjIgNy44IDEgNS45IDEuNyA5LjMgNS43IDEwLjkgMTEuNSAxLjIgNC4zIDEuNCA4LjcgMS41IDEzLjEuMSA5LjIgMCAxOC42IDAgMjcuOXoiIHN0eWxlPSJmaWxsOiNmZWZlZmUiLz48L3N2Zz4=);mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMzUgMTM1IiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCAxMzUgMTM1IiB4bWw6c3BhY2U9InByZXNlcnZlIj48cGF0aCBkPSJNNjcuNSAwQzMwLjIgMCAwIDMwLjIgMCA2Ny41UzMwLjIgMTM1IDY3LjUgMTM1IDEzNSAxMDQuOCAxMzUgNjcuNSAxMDQuOCAwIDY3LjUgMHpNNDUuMiAxMDQuNGMtNC44LS4xLTkuNiAwLTE0LjQgMC0uOCAwLTEtLjItMS0xVjUyLjFjMC0uNy4yLTEgLjktMWgxNC42Yy45IDAgMS4xLjQgMS4xIDEuMnY1MWMwIC44LS4yIDEuMS0xLjIgMS4xem0tNy4xLTYwLjZjLTUuMiAwLTkuNi00LjMtOS42LTkuNSAwLTUuMyA0LjMtOS42IDkuNi05LjYgNS4yIDAgOS42IDQuMyA5LjYgOS41cy00LjMgOS42LTkuNiA5LjZ6bTcwLjQgNTkuNWMwIC45LS4yIDEuMS0xLjEgMS4xSDkzYy0uOCAwLTEtLjMtMS0xLjFWNzYuN2MwLTIuMi0uMS00LjQtLjgt
Ni42LTEuMS00LTQtNi04LjItNS44LTUuNy4zLTguNyAzLjEtOS40IDguOS0uMiAxLjQtLjMgMi44LS4zIDQuMnYyNS45YzAgLjktLjIgMS4xLTEuMSAxLjFINTcuN2MtLjggMC0xLS4yLTEtMVY1Mi4xYzAtLjguMy0xIDEuMS0xaDEzLjhjLjggMCAxLjEuMyAxIDEuMXY2LjFjMS4xLTEuMiAyLTIuNSAzLjItMy42IDMuNS0zLjMgNy42LTUgMTIuNS00LjkgMi43IDAgNS4zLjIgNy44IDEgNS45IDEuNyA5LjMgNS43IDEwLjkgMTEuNSAxLjIgNC4zIDEuNCA4LjcgMS41IDEzLjEuMSA5LjIgMCAxOC42IDAgMjcuOXoiIHN0eWxlPSJmaWxsOiNmZWZlZmUiLz48L3N2Zz4=)}.github-stars{display:flex;height:40px;margin-left:12px;width:150px}.footer.footer--dark{background-color:#003257}.footer.footer--dark .container.container-fluid .footer__bottom{background-color:#003a60;left:0;padding:2rem;position:absolute;width:100%}.footer.footer--dark .footer__copyright{font-size:.85rem;letter-spacing:.025rem;opacity:.5}.footer.footer--dark .footer__col{border-left:2px solid #214c6c;height:10rem;margin:2.3rem auto 5rem;padding-left:1.5rem}.footer.footer--dark .footer__col .footer__title{font-size:1rem;letter-spacing:.025rem}.footer.footer--dark .footer__col:first-child .footer__item{display:inline-block;padding:.3rem}.footer.footer--dark .footer__col .footer__link-item{font-size:.9rem;text-decoration:underline}a.link-to-source{display:inline-block;margin:0 0 1rem}a.link-to-source:after{background-color:var(--ifm-link-color);content:"";display:inline-block;height:24px;margin-left:0;margin-top:.02rem;-webkit-mask-image:url("data:image/svg+xml;charset=utf-8,%3Csvg width='24' height='24' stroke-width='1.5' fill='none' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M21 3h-6m6 0-9 9m9-9v6' stroke='currentColor' stroke-linecap='round' stroke-linejoin='round'/%3E%3Cpath d='M21 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h6' stroke='currentColor' stroke-linecap='round'/%3E%3C/svg%3E");mask-image:url("data:image/svg+xml;charset=utf-8,%3Csvg width='24' height='24' stroke-width='1.5' fill='none' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M21 3h-6m6 0-9 9m9-9v6' stroke='currentColor' stroke-linecap='round' 
stroke-linejoin='round'/%3E%3Cpath d='M21 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h6' stroke='currentColor' stroke-linecap='round'/%3E%3C/svg%3E");position:absolute;transform:scale(.67);width:24px}.footer.footer--dark .footer__col .footer__link-item:after{background-color:#8c9fae;content:"";display:inline-block;height:35px;margin-left:-.5rem;margin-top:-.4rem;-webkit-mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIGhlaWdodD0iNDgiIHZpZXdCb3g9IjAgOTYgOTYwIDk2MCIgd2lkdGg9IjQ4Ij48cGF0aCBkPSJNNTQwIDc5M3EtOS05LTktMjEuNXQ4LTIwLjVsMTQ3LTE0N0gxOTBxLTEzIDAtMjEuNS04LjVUMTYwIDU3NHEwLTEzIDguNS0yMS41VDE5MCA1NDRoNDk2TDUzOCAzOTZxLTktOS04LjUtMjF0OS41LTIxcTktOCAyMS41LTh0MjAuNSA4bDE5OSAxOTlxNSA1IDcgMTB0MiAxMXEwIDYtMiAxMXQtNyAxMEw1ODIgNzkzcS05IDktMjEgOXQtMjEtOVoiLz48L3N2Zz4=);mask-image:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIGhlaWdodD0iNDgiIHZpZXdCb3g9IjAgOTYgOTYwIDk2MCIgd2lkdGg9IjQ4Ij48cGF0aCBkPSJNNTQwIDc5M3EtOS05LTktMjEuNXQ4LTIwLjVsMTQ3LTE0N0gxOTBxLTEzIDAtMjEuNS04LjVUMTYwIDU3NHEwLTEzIDguNS0yMS41VDE5MCA1NDRoNDk2TDUzOCAzOTZxLTktOS04LjUtMjF0OS41LTIxcTktOCAyMS41LTh0MjAuNSA4bDE5OSAxOTlxNSA1IDcgMTB0MiAxMXEwIDYtMiAxMXQtNyAxMEw1ODIgNzkzcS05IDktMjEgOXQtMjEtOVoiLz48L3N2Zz4=);position:absolute;transform:scale(.4);width:42px}#docusaurus-base-url-issue-banner-container,.footer.footer--dark .footer__col .footer__link-item>svg,.footer.footer--dark .footer__col:last-child .footer__link-item:after,.themedImage_ToTc,[data-theme=dark] .lightToggleIcon_pyhR,[data-theme=light] .darkToggleIcon_wfgR,html[data-announcement-bar-initially-dismissed=true] .announcementBar_mb4j{display:none}.container .accordion{border:1px solid #8bcae51a;border-radius:2px}.accordion__item+.accordion__item{border-top:1px solid #8bcae51a}.accordion__item .accordion__button{background-color:#076d9e;border:1px solid #8bcae5;color:#fff;cursor:pointer;font-size:1rem;margin:.5rem 
0;padding:2rem;text-align:left;width:100%}.accordion__item .accordion__button:hover{background-color:#60bee4}.accordion__item .accordion__panel{animation:.35s ease-in b;color:#fff;font-size:1rem;padding:2rem 2rem 1rem}.prism-code.language-py code .token.decorator{color:#c5221f!important}html.docs-doc-page[data-theme=dark] .prism-code.language-py code .token.decorator{color:#fbc02d!important}body:not(.navigation-with-keyboard) :not(input):focus{outline:0}.skipToContent_fXgn{background-color:var(--ifm-background-surface-color);color:var(--ifm-color-emphasis-900);left:100%;padding:calc(var(--ifm-global-spacing)/2) var(--ifm-global-spacing);position:fixed;top:1rem;z-index:calc(var(--ifm-z-index-fixed) + 1)}.skipToContent_fXgn:focus{box-shadow:var(--ifm-global-shadow-md);left:1rem}.closeButton_CVFx{line-height:0;padding:0}.content_knG7{font-size:85%;padding:5px 0;text-align:center}.content_knG7 a{color:inherit;text-decoration:underline}.announcementBar_mb4j{align-items:center;background-color:var(--ifm-color-white);border-bottom:1px solid var(--ifm-color-emphasis-100);color:var(--ifm-color-black);display:flex;height:var(--docusaurus-announcement-bar-height)}.announcementBarPlaceholder_vyr4{flex:0 0 10px}.announcementBarClose_gvF7{align-self:stretch;flex:0 0 30px}.toggle_vylO{height:2rem;width:2rem}.toggleButton_gllP{align-items:center;border-radius:50%;display:flex;height:100%;justify-content:center;transition:background var(--ifm-transition-fast);width:100%}.toggleButton_gllP:hover{background:var(--ifm-color-emphasis-200)}.toggleButtonDisabled_aARS{cursor:not-allowed}.darkNavbarColorModeToggle_X3D1:hover{background:var(--ifm-color-gray-800)}[data-theme=dark] .themedImage--dark_i4oU,[data-theme=light] .themedImage--light_HNdA{display:initial}.iconExternalLink_nPIU{margin-left:.3rem}.iconLanguage_nlXk{margin-right:5px;vertical-align:text-bottom}.navbarHideable_m1mJ{transition:transform var(--ifm-transition-fast) ease}.navbarHidden_jGov{transform:translate3d(0,calc(-100% 
- 2px),0)}.errorBoundaryError_a6uf{color:red;white-space:pre-wrap}.footerLogoLink_BH7S{opacity:.5;transition:opacity var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.footerLogoLink_BH7S:hover,.hash-link:focus,:hover>.hash-link{opacity:1}.mainWrapper_z2l0{display:flex;flex:1 0 auto;flex-direction:column}.docusaurus-mt-lg{margin-top:3rem}#__docusaurus{display:flex;flex-direction:column;min-height:100%}.iconEdit_Z9Sw{margin-right:.3em;vertical-align:sub}.tag_zVej{border:1px solid var(--docusaurus-tag-list-border);transition:border var(--ifm-transition-fast)}.tag_zVej:hover{--docusaurus-tag-list-border:var(--ifm-link-color);text-decoration:none}.tagRegular_sFm0{border-radius:var(--ifm-global-radius);font-size:90%;padding:.2rem .5rem .3rem}.tagWithCount_h2kH{align-items:center;border-left:0;display:flex;padding:0 .5rem 0 1rem;position:relative}.tagWithCount_h2kH:after,.tagWithCount_h2kH:before{border:1px solid var(--docusaurus-tag-list-border);content:"";position:absolute;top:50%;transition:inherit}.tagWithCount_h2kH:before{border-bottom:0;border-right:0;height:1.18rem;right:100%;transform:translate(50%,-50%) rotate(-45deg);width:1.18rem}.tagWithCount_h2kH:after{border-radius:50%;height:.5rem;left:0;transform:translateY(-50%);width:.5rem}.tagWithCount_h2kH span{background:var(--ifm-color-secondary);border-radius:var(--ifm-global-radius);color:var(--ifm-color-black);font-size:.7rem;line-height:1.2;margin-left:.3rem;padding:.1rem .4rem}.tags_jXut{display:inline}.tag_QGVx{display:inline-block;margin:0 .4rem .5rem 0}.lastUpdated_vwxv{font-size:smaller;font-style:italic;margin-top:.2rem}.tocCollapsibleButton_TO0P{align-items:center;display:flex;font-size:inherit;justify-content:space-between;padding:.4rem .8rem;width:100%}.tocCollapsibleButton_TO0P:after{background:var(--ifm-menu-link-sublist-icon) 50% 50%/2rem 2rem no-repeat;content:"";filter:var(--ifm-menu-link-sublist-icon-filter);height:1.25rem;transform:rotate(180deg);transition:transform 
var(--ifm-transition-fast);width:1.25rem}.tocCollapsibleButtonExpanded_MG3E:after,.tocCollapsibleExpanded_sAul{transform:none}.tocCollapsible_ETCw{background-color:var(--ifm-menu-color-background-active);border-radius:var(--ifm-global-radius);margin:1rem 0}.buttonGroup__atx button,.codeBlockContainer_Ckt0{background:var(--prism-background-color);color:var(--prism-color)}.tocCollapsibleContent_vkbj>ul{border-left:none;border-top:1px solid var(--ifm-color-emphasis-300);font-size:15px;padding:.2rem 0}.tocCollapsibleContent_vkbj ul li{margin:.4rem .8rem}.tocCollapsibleContent_vkbj a{display:block}.tableOfContents_bqdL{max-height:calc(100vh - var(--ifm-navbar-height) - 2rem);overflow-y:auto;position:sticky;top:calc(var(--ifm-navbar-height) + 1rem)}.anchorWithStickyNavbar_LWe7{scroll-margin-top:calc(var(--ifm-navbar-height) + .5rem)}.anchorWithHideOnScrollNavbar_WYt5{scroll-margin-top:.5rem}.hash-link{opacity:0;padding-left:.5rem;transition:opacity var(--ifm-transition-fast);-webkit-user-select:none;user-select:none}.hash-link:before{content:"#"}.codeBlockContainer_Ckt0{border-radius:var(--ifm-code-border-radius);box-shadow:var(--ifm-global-shadow-lw);margin-bottom:var(--ifm-leading)}.codeBlockContent_biex{border-radius:inherit;direction:ltr;position:relative}.codeBlockTitle_Ktv7{border-bottom:1px solid var(--ifm-color-emphasis-300);border-top-left-radius:inherit;border-top-right-radius:inherit;font-size:var(--ifm-code-font-size);font-weight:500;padding:.75rem var(--ifm-pre-padding)}.codeBlock_bY9V{--ifm-pre-background:var(--prism-background-color);margin:0;padding:0}.codeBlockTitle_Ktv7+.codeBlockContent_biex .codeBlock_bY9V{border-top-left-radius:0;border-top-right-radius:0}.codeBlockLines_e6Vv{float:left;font:inherit;min-width:100%;padding:var(--ifm-pre-padding)}.codeBlockLinesWithNumbering_o6Pm{display:table;padding:var(--ifm-pre-padding) 
0}.buttonGroup__atx{column-gap:.2rem;display:flex;position:absolute;right:calc(var(--ifm-pre-padding)/2);top:calc(var(--ifm-pre-padding)/2)}.buttonGroup__atx button{align-items:center;border:1px solid var(--ifm-color-emphasis-300);border-radius:var(--ifm-global-radius);display:flex;line-height:0;opacity:0;padding:.4rem;transition:opacity var(--ifm-transition-fast) ease-in-out}.buttonGroup__atx button:focus-visible,.buttonGroup__atx button:hover{opacity:1!important}.theme-code-block:hover .buttonGroup__atx button{opacity:.4}:where(:root){--docusaurus-highlighted-code-line-bg:#484d5b}:where([data-theme=dark]){--docusaurus-highlighted-code-line-bg:#646464}.theme-code-block-highlighted-line{background-color:var(--docusaurus-highlighted-code-line-bg);display:block;margin:0 calc(var(--ifm-pre-padding)*-1);padding:0 var(--ifm-pre-padding)}.codeLine_lJS_{counter-increment:a;display:table-row}.codeLineNumber_Tfdd{background:var(--ifm-pre-background);display:table-cell;left:0;overflow-wrap:normal;padding:0 var(--ifm-pre-padding);position:sticky;text-align:right;width:1%}.codeLineNumber_Tfdd:before{content:counter(a);opacity:.4}.codeLineContent_feaV{padding-right:var(--ifm-pre-padding)}.theme-code-block:hover .copyButtonCopied_obH4{opacity:1!important}.copyButtonIcons_eSgA{height:1.125rem;position:relative;width:1.125rem}.copyButtonIcon_y97N,.copyButtonSuccessIcon_LjdS{fill:currentColor;height:inherit;left:0;opacity:inherit;position:absolute;top:0;transition:all var(--ifm-transition-fast) ease;width:inherit}.copyButtonSuccessIcon_LjdS{color:#00d600;left:50%;opacity:0;top:50%;transform:translate(-50%,-50%) scale(.33)}.copyButtonCopied_obH4 .copyButtonIcon_y97N{opacity:0;transform:scale(.33)}.copyButtonCopied_obH4 .copyButtonSuccessIcon_LjdS{opacity:1;transform:translate(-50%,-50%) scale(1);transition-delay:75ms}.wordWrapButtonIcon_Bwma{height:1.2rem;width:1.2rem}.details_lb9f{--docusaurus-details-summary-arrow-size:0.38rem;--docusaurus-details-transition:transform 200ms 
ease;--docusaurus-details-decoration-color:grey}.details_lb9f>summary{cursor:pointer;list-style:none;padding-left:1rem;position:relative}.details_lb9f>summary::-webkit-details-marker{display:none}.details_lb9f>summary:before{border-color:#0000 #0000 #0000 var(--docusaurus-details-decoration-color);border-style:solid;border-width:var(--docusaurus-details-summary-arrow-size);content:"";left:0;position:absolute;top:.45rem;transform:rotate(0);transform-origin:calc(var(--docusaurus-details-summary-arrow-size)/2) 50%;transition:var(--docusaurus-details-transition)}.collapsibleContent_i85q{border-top:1px solid var(--docusaurus-details-decoration-color);margin-top:1rem;padding-top:1rem}.details_b_Ee{--docusaurus-details-decoration-color:var(--ifm-alert-border-color);--docusaurus-details-transition:transform var(--ifm-transition-fast) ease;border:1px solid var(--ifm-alert-border-color);margin:0 0 var(--ifm-spacing-vertical)}.containsTaskList_mC6p{list-style:none}.img_ev3q{height:auto}.admonition_LlT9{margin-bottom:1em}.admonitionHeading_tbUL{font:var(--ifm-heading-font-weight) var(--ifm-h5-font-size)/var(--ifm-heading-line-height) var(--ifm-heading-font-family);margin-bottom:.3rem}.admonitionHeading_tbUL code{text-transform:none}.admonitionIcon_kALy{display:inline-block;margin-right:.4em;vertical-align:middle}.admonitionIcon_kALy svg{fill:var(--ifm-alert-foreground-color);display:inline-block;height:1.6em;width:1.6em}.breadcrumbHomeIcon_YNFT{height:1.1rem;position:relative;top:1px;vertical-align:top;width:1.1rem}.breadcrumbsContainer_Z_bl{--ifm-breadcrumb-size-multiplier:0.8;margin-bottom:.8rem}.searchQueryInput_u2C7,.searchVersionInput_m0Ui{background:var(--docsearch-searchbox-focus-background);border:2px solid var(--ifm-toc-border-color);border-radius:var(--ifm-global-radius);color:var(--docsearch-text-color);font:var(--ifm-font-size-base) var(--ifm-font-family-base);margin-bottom:.5rem;padding:.8rem;transition:border var(--ifm-transition-fast) 
ease;width:100%}.searchQueryInput_u2C7:focus,.searchVersionInput_m0Ui:focus{border-color:var(--docsearch-primary-color);outline:0}.searchQueryInput_u2C7::placeholder{color:var(--docsearch-muted-color)}.searchResultsColumn_JPFH{font-size:.9rem;font-weight:700}.algoliaLogo_rT1R{max-width:150px}.algoliaLogoPathFill_WdUC{fill:var(--ifm-font-color-base)}.searchResultItem_Tv2o{border-bottom:1px solid var(--ifm-toc-border-color);padding:1rem 0}.searchResultItemHeading_KbCB{font-weight:400;margin-bottom:0}.searchResultItemPath_lhe1{--ifm-breadcrumb-separator-size-multiplier:1;color:var(--ifm-color-content-secondary);font-size:.8rem}.searchResultItemSummary_AEaO{font-style:italic;margin:.5rem 0 0}.loadingSpinner_XVxU{animation:1s linear infinite a;border:.4em solid #eee;border-radius:50%;border-top:.4em solid var(--ifm-color-primary);height:3rem;margin:0 auto;width:3rem}@keyframes a{to{transform:rotate(1turn)}}.loader_vvXV{margin-top:2rem}.search-result-match{background:#ffd78e40;color:var(--docsearch-hit-color);padding:.09em 0}.backToTopButton_sjWU{background-color:var(--ifm-color-emphasis-200);border-radius:50%;bottom:1.3rem;box-shadow:var(--ifm-global-shadow-lw);height:3rem;opacity:0;position:fixed;right:1.3rem;transform:scale(0);transition:all var(--ifm-transition-fast) var(--ifm-transition-timing-default);visibility:hidden;width:3rem;z-index:calc(var(--ifm-z-index-fixed) - 1)}.link_SBpC,.link_ksra{transition:color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.backToTopButton_sjWU:after{background-color:var(--ifm-color-emphasis-1000);content:" ";display:inline-block;height:100%;-webkit-mask:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem no-repeat;mask:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem 
no-repeat;width:100%}.backToTopButtonShow_xfvO{opacity:1;transform:scale(1);visibility:visible}[data-theme=dark]:root{--docusaurus-collapse-button-bg:#ffffff0d;--docusaurus-collapse-button-bg-hover:#ffffff1a}.collapseSidebarButton_PEFL{display:none;margin:0}.descriptionMobile_CZcP,.docSidebarContainer_b6E3,.sidebarLogo_isFc,[hidden]{display:none}.docMainContainer_gTbr,.docPage__5DB{display:flex;width:100%}.docPage__5DB{flex:1 0}.docsWrapper_BCFX{display:flex;flex:1 0 auto}.features_s1P3,.features_t9lD{align-items:center;width:100%}.features_s1P3{display:flex;padding:2rem 0}.header_vZXi{font-size:4rem;text-align:center}.description_vol2{font-size:1.2rem;margin-top:1rem}.containerWithMinHeight_zTH6{min-height:500px}.features_t9lD{background:#1474a6;background:linear-gradient(180deg,#1474a6 50%,#52afd8);display:flex;padding:1rem 0 4rem}.features_JWCp,.features_i8jF{align-items:center;background-color:#60bee4;display:flex}.featureSvg_GfXr{height:250px;margin:30px 0;width:330px}.title_iMVk{color:#fff;font-size:3rem;padding-bottom:3rem;padding-top:2rem;text-align:center}.textContainer_jPR0{color:#fff;padding:1rem 2.2rem;text-align:center}.subTitle_yWbm{color:#fff;font-size:1.4rem;text-align:left}.description_gnTt,.description_qDmr,.description_ynef{color:#fff;font-size:1rem;margin-top:3rem;text-align:center}.rowWitExtraMargin_OCkm,.rowWitExtraMargin_R_NL,.rowWitExtraMargin_qK_b,.rowWitExtraMargin_xiCQ{margin-top:80px}.features_JWCp{border-bottom:1px solid #8bcae5;padding:4rem 0 0;width:100%}.features_JWCp 
p{font-size:1rem;text-align:left}.featureSvg_IBxz,.featureSvg_Td5A,.featureSvg_ed9f{height:200px;width:200px}.title_bFDR,.title_e8_4,.title_lvu5{color:#fff;font-size:3rem;text-align:center}.subTitle_FXDe,.subTitle_TSnm{color:#fff;font-size:1.5rem;text-align:left}.link_ksra{color:#fff;text-decoration:underline}.link_ksra:hover{color:var(--ifm-hero-text-color)}.wrapper_DIPT,.wrapper_yrC9{position:relative}.verticalAndHorizontalCenter_LWfS,.verticalAndHorizontalCenter_krzz{left:50%;margin:0;position:absolute;top:50%;transform:translate(-50%,-50%)}.childrenWithExtraPadding_xwhI{color:#fff;font-size:1.2rem;margin:4rem 0}.features_i8jF{padding:3rem 0;width:100%}.title__ymQ{color:#fff;font-size:3rem;padding-bottom:2rem;text-align:center}.subTitle_DG7d{font-size:1.5rem;text-align:left}.description_fCta{font-size:1rem;margin-top:3rem;text-align:center}.buttons_AeoN,.buttons_E9Qp{align-items:center;display:flex;justify-content:center;margin-top:55px}.heroButton_F0GI,.heroButton_GTT_{background:var(--ifm-navbar-background-color);border-radius:25px;color:#fff;font-size:1rem;margin-top:-1.5rem;padding:.7rem 2.5rem}.heroButton_F0GI:hover,.heroButton_GTT_:hover{background:#3e99c5}.testimonialAnchor_iYyG{background-color:#fff;border-radius:5px;color:#1c1e21;display:block;margin-left:1rem;margin-top:1rem;padding:1.6rem}.testimonialAnchor_iYyG:hover{color:#1c1e21;text-decoration:none}.DocSearch-Hit[aria-selected=true] mark,.faqAnswer_fJMF a,.href_wqkW,.link_SBpC{text-decoration:underline}.testimonialWrapper_gvoa{padding:0 0 3rem}.testimonialHeader_iSI8{display:flex;height:45px;justify-content:space-between;margin-bottom:30px}.testimonialUserInfo_th5k h6,.testimonialUserInfo_th5k p{margin-bottom:0;margin-left:10px}.testimonialProfilePic_wg0d{border-radius:50%;height:auto;width:45px}.testimonialSourceIcon_RwqW{height:auto;width:20px}.features_fQn7{align-items:center;background-color:#076d9e;display:flex;padding:2rem 0 
8rem;width:100%}.title_bFDR+p{font-style:italic;margin-bottom:5rem}.subTitle_u53r,.title_bFDR+p{color:#fff;font-size:1.5rem;text-align:center}.link_SBpC{color:var(--ifm-hero-text-color)}.accordion{border:1px solid #0000001a;border-radius:2px}.accordion__item+.accordion__item{border-top:1px solid #0000001a}.accordion__button{background-color:#f4f4f4;border:none;color:#444;cursor:pointer;padding:18px;text-align:left;width:100%}.accordion__button:hover{background-color:#ddd}.accordion__button:before{border-bottom:2px solid;border-right:2px solid;content:"";display:inline-block;height:10px;margin-right:12px;transform:rotate(-45deg);width:10px}.accordion__button[aria-expanded=true]:before,.accordion__button[aria-selected=true]:before{transform:rotate(45deg)}.accordion__panel{animation:.35s ease-in b;padding:20px}.browserWindow_my1Q{border:1px solid #fff;border-radius:var(--ifm-global-radius);box-shadow:0 5px 15px #00000059;margin-bottom:var(--ifm-leading)}.browserWindowHeader_jXSR{align-items:center;background:#ebedf0;display:flex;padding:.5rem 1rem}.row_KZDM:after{clear:both;content:"";display:table}.buttons_uHc7{white-space:nowrap}.right_oyze{align-self:center;width:10%}[data-theme=light]{--ifm-background-color:#fff}.browserWindowAddressBar_Pd8y{background-color:#fff;border-radius:12.5px;color:var(--ifm-color-gray-800);flex:1 0;font:400 13px Arial,sans-serif;height:20px;margin:0 1rem 0 .5rem;padding:5px 15px;-webkit-user-select:none;user-select:none}[data-theme=dark] .browserWindowAddressBar_Pd8y{color:var(--ifm-color-gray-300)}.dot_giz1{background-color:#bbb;border-radius:50%;display:inline-block;height:12px;margin-right:6px;margin-top:4px;width:12px}.bar_rrRL{background-color:#aaa;display:block;height:3px;margin:3px 
0;width:17px}.browserWindowBody_Idgs{background-color:var(--ifm-background-color);border-bottom-left-radius:inherit;border-bottom-right-radius:inherit;padding:0}.browserWindowBody_Idgs>:last-child{margin-bottom:-8px}.features_K0bx{align-items:center;background:#52afd8;background:linear-gradient(180deg,#52afd8,#60bee4);border-bottom:1px solid #8bcae5;border-top:1px solid #8bcae5;display:flex;padding:5rem 0;width:100%}.featureSvg_waEg{height:160px;margin:30px 0;width:160px}.fastkafkaDescription_h_GB{color:#fff;font-size:1.5rem;font-style:italic;margin-bottom:5rem;text-align:center}.fastkafkaChatIframe_w3XB{display:inline-block;height:600px;position:relative;width:100%}.fastkafkaChatHeader_lrZG{font-size:1.8rem;text-align:center}.robotFooterContainer_CsQd{position:relative;text-align:center}.robotFooterIcon_R67M{height:auto;margin-left:-3.5rem;margin-top:-4rem;position:absolute;width:7rem}.heroBanner_qdFl{background:#60bee4;background:linear-gradient(180deg,#60bee4,#1173a4);overflow:hidden;padding:4rem 0;position:relative;text-align:center}.heroRobot_FLpk{margin-bottom:2rem;margin-top:1rem;width:870px}.title_GqtP{font-size:3rem;margin-bottom:60px}.description_meEo{color:#fff;font-size:1.5rem;font-style:italic;line-height:.8rem}.DocSearch-Button{align-items:center;background:var(--docsearch-searchbox-background);border:0;border-radius:40px;color:var(--docsearch-muted-color);cursor:pointer;display:flex;font-weight:500;height:36px;justify-content:space-between;padding:0 8px;-webkit-user-select:none;user-select:none}.DocSearch-Button:active,.DocSearch-Button:focus,.DocSearch-Button:hover{background:var(--docsearch-searchbox-focus-background);box-shadow:var(--docsearch-searchbox-shadow);color:var(--docsearch-text-color);outline:0}.DocSearch-Button-Container{align-items:center;display:flex}.DocSearch-Search-Icon{stroke-width:1.6}.DocSearch-Hit-Tree,.DocSearch-Hit-action,.DocSearch-Hit-icon,.DocSearch-Reset{stroke-width:var(--docsearch-icon-stroke-width)}.DocSearch-Button 
.DocSearch-Search-Icon{color:var(--docsearch-text-color)}.DocSearch-Button-Placeholder{font-size:1rem;padding:0 12px 0 6px}.DocSearch-Input,.DocSearch-Link{-webkit-appearance:none;font:inherit}.DocSearch-Button-Keys{display:flex;min-width:calc(40px + .8em)}.DocSearch-Button-Key{align-items:center;background:var(--docsearch-key-gradient);border:0;border-radius:3px;box-shadow:var(--docsearch-key-shadow);color:var(--docsearch-muted-color);display:flex;height:18px;justify-content:center;margin-right:.4em;padding:0 0 2px;position:relative;top:-1px;width:20px}.DocSearch--active{overflow:hidden!important}.DocSearch-Container{background-color:var(--docsearch-container-background);height:100vh;left:0;position:fixed;top:0;width:100vw;z-index:200}.DocSearch-Container a{text-decoration:none}.DocSearch-Link{appearance:none;background:none;border:0;color:var(--docsearch-highlight-color);cursor:pointer;margin:0;padding:0}.DocSearch-Modal{background:var(--docsearch-modal-background);border-radius:6px;box-shadow:var(--docsearch-modal-shadow);flex-direction:column;margin:60px auto auto;max-width:var(--docsearch-modal-width);position:relative}.DocSearch-SearchBar{display:flex;padding:var(--docsearch-spacing) var(--docsearch-spacing) 0}.DocSearch-Form{align-items:center;background:var(--docsearch-searchbox-focus-background);border-radius:4px;box-shadow:var(--docsearch-searchbox-shadow);display:flex;height:var(--docsearch-searchbox-height);margin:0;padding:0 var(--docsearch-spacing);position:relative;width:100%}.DocSearch-Input{appearance:none;background:#0000;border:0;color:var(--docsearch-text-color);flex:1;font-size:1.2em;height:100%;outline:0;padding:0 0 0 
8px;width:80%}.DocSearch-Hit-action-button,.DocSearch-Reset{-webkit-appearance:none;border:0;cursor:pointer}.DocSearch-Input::placeholder{color:var(--docsearch-muted-color);opacity:1}.DocSearch-Input::-webkit-search-cancel-button,.DocSearch-Input::-webkit-search-decoration,.DocSearch-Input::-webkit-search-results-button,.DocSearch-Input::-webkit-search-results-decoration{display:none}.DocSearch-LoadingIndicator,.DocSearch-MagnifierLabel,.DocSearch-Reset{margin:0;padding:0}.DocSearch-Container--Stalled .DocSearch-LoadingIndicator,.DocSearch-MagnifierLabel,.DocSearch-Reset{align-items:center;color:var(--docsearch-highlight-color);display:flex;justify-content:center}.DocSearch-Cancel,.DocSearch-Container--Stalled .DocSearch-MagnifierLabel,.DocSearch-LoadingIndicator,.DocSearch-Reset[hidden]{display:none}.DocSearch-Reset{animation:.1s ease-in forwards b;appearance:none;background:none;border-radius:50%;color:var(--docsearch-icon-color);padding:2px;right:0}.DocSearch-Help,.DocSearch-HitsFooter,.DocSearch-Label{color:var(--docsearch-muted-color)}.DocSearch-Reset:hover{color:var(--docsearch-highlight-color)}.DocSearch-LoadingIndicator svg,.DocSearch-MagnifierLabel svg{height:24px;width:24px}.DocSearch-Dropdown{max-height:calc(var(--docsearch-modal-height) - var(--docsearch-searchbox-height) - var(--docsearch-spacing) - var(--docsearch-footer-height));min-height:var(--docsearch-spacing);overflow-y:auto;overflow-y:overlay;padding:0 var(--docsearch-spacing);scrollbar-color:var(--docsearch-muted-color) var(--docsearch-modal-background);scrollbar-width:thin}.DocSearch-Dropdown::-webkit-scrollbar{width:12px}.DocSearch-Dropdown::-webkit-scrollbar-track{background:#0000}.DocSearch-Dropdown::-webkit-scrollbar-thumb{background-color:var(--docsearch-muted-color);border:3px solid var(--docsearch-modal-background);border-radius:20px}.DocSearch-Dropdown 
ul{list-style:none;margin:0;padding:0}.DocSearch-Label{font-size:.75em;line-height:1.6em}.DocSearch-Help{font-size:.9em;margin:0;-webkit-user-select:none;user-select:none}.DocSearch-Title{font-size:1.2em}.DocSearch-Logo a{display:flex}.DocSearch-Logo svg{color:var(--docsearch-logo-color);margin-left:8px}.DocSearch-Hits:last-of-type{margin-bottom:24px}.DocSearch-Hits mark{background:none;color:var(--docsearch-highlight-color)}.DocSearch-HitsFooter{display:flex;font-size:.85em;justify-content:center;margin-bottom:var(--docsearch-spacing);padding:var(--docsearch-spacing)}.DocSearch-HitsFooter a{border-bottom:1px solid;color:inherit}.DocSearch-Hit{border-radius:4px;display:flex;padding-bottom:4px;position:relative}.DocSearch-Hit--deleting{opacity:0;transition:.25s linear}.DocSearch-Hit--favoriting{transform:scale(0);transform-origin:top center;transition:.25s linear .25s}.DocSearch-Hit a{background:var(--docsearch-hit-background);border-radius:4px;box-shadow:var(--docsearch-hit-shadow);display:block;padding-left:var(--docsearch-spacing);width:100%}.DocSearch-Hit-source{background:var(--docsearch-modal-background);color:var(--docsearch-highlight-color);font-size:.85em;font-weight:600;line-height:32px;margin:0 -4px;padding:8px 4px 0;position:sticky;top:0;z-index:10}.DocSearch-Hit-Tree{color:var(--docsearch-muted-color);height:var(--docsearch-hit-height);opacity:.5;width:24px}.DocSearch-Hit[aria-selected=true] a{background-color:var(--docsearch-highlight-color)}.DocSearch-Hit-Container{align-items:center;color:var(--docsearch-hit-color);display:flex;flex-direction:row;height:var(--docsearch-hit-height);padding:0 var(--docsearch-spacing) 0 0}.DocSearch-Hit-icon{height:20px;width:20px}.DocSearch-Hit-action,.DocSearch-Hit-icon{color:var(--docsearch-muted-color)}.DocSearch-Hit-action{align-items:center;display:flex;height:22px;width:22px}.DocSearch-Hit-action 
svg{display:block;height:18px;width:18px}.DocSearch-Hit-action+.DocSearch-Hit-action{margin-left:6px}.DocSearch-Hit-action-button{appearance:none;background:none;border-radius:50%;color:inherit;padding:2px}svg.DocSearch-Hit-Select-Icon{display:none}.DocSearch-Hit[aria-selected=true] .DocSearch-Hit-Select-Icon{display:block}.DocSearch-Hit-action-button:focus,.DocSearch-Hit-action-button:hover{background:#0003;transition:background-color .1s ease-in}.DocSearch-Hit-action-button:focus path,.DocSearch-Hit-action-button:hover path{fill:#fff}.DocSearch-Hit-content-wrapper{display:flex;flex:1 1 auto;flex-direction:column;font-weight:500;justify-content:center;line-height:1.2em;margin:0 8px;overflow-x:hidden;position:relative;text-overflow:ellipsis;white-space:nowrap;width:80%}.DocSearch-Hit-title{font-size:.9em}.DocSearch-Hit-path{color:var(--docsearch-muted-color);font-size:.75em}.DocSearch-Hit[aria-selected=true] .DocSearch-Hit-Tree,.DocSearch-Hit[aria-selected=true] .DocSearch-Hit-action,.DocSearch-Hit[aria-selected=true] .DocSearch-Hit-icon,.DocSearch-Hit[aria-selected=true] .DocSearch-Hit-path,.DocSearch-Hit[aria-selected=true] .DocSearch-Hit-text,.DocSearch-Hit[aria-selected=true] .DocSearch-Hit-title,.DocSearch-Hit[aria-selected=true] mark{color:var(--docsearch-hit-active-color)!important}.DocSearch-ErrorScreen,.DocSearch-NoResults,.DocSearch-StartScreen{font-size:.9em;margin:0 auto;padding:36px 0;text-align:center;width:80%}.DocSearch-Screen-Icon{color:var(--docsearch-muted-color);padding-bottom:12px}.DocSearch-NoResults-Prefill-List{display:inline-block;padding-bottom:24px;text-align:left}.DocSearch-NoResults-Prefill-List ul{display:inline-block;padding:8px 0 0}.DocSearch-NoResults-Prefill-List li{list-style-position:inside;list-style-type:"» 
"}.DocSearch-Prefill{-webkit-appearance:none;appearance:none;background:none;border:0;border-radius:1em;color:var(--docsearch-highlight-color);cursor:pointer;display:inline-block;font-size:1em;font-weight:700;padding:0}.DocSearch-Prefill:focus,.DocSearch-Prefill:hover{outline:0;text-decoration:underline}.DocSearch-Footer{align-items:center;background:var(--docsearch-footer-background);border-radius:0 0 8px 8px;box-shadow:var(--docsearch-footer-shadow);display:flex;flex-direction:row-reverse;flex-shrink:0;height:var(--docsearch-footer-height);justify-content:space-between;padding:0 var(--docsearch-spacing);position:relative;-webkit-user-select:none;user-select:none;width:100%;z-index:300}.DocSearch-Commands{color:var(--docsearch-muted-color);display:flex;list-style:none;margin:0;padding:0}.DocSearch-Commands li{align-items:center;display:flex}.DocSearch-Commands li:not(:last-of-type){margin-right:.8em}.DocSearch-Commands-Key{align-items:center;background:var(--docsearch-key-gradient);border:0;border-radius:2px;box-shadow:var(--docsearch-key-shadow);color:var(--docsearch-muted-color);display:flex;height:18px;justify-content:center;margin-right:.4em;padding:0 0 1px;width:20px}@keyframes b{0%{opacity:0}to{opacity:1}}.DocSearch-Button{margin:0;transition:all var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.DocSearch-Container{z-index:calc(var(--ifm-z-index-fixed) + 1)}@media (min-width:997px){.collapseSidebarButton_PEFL,.expandButton_m80_{background-color:var(--docusaurus-collapse-button-bg)}:root{--docusaurus-announcement-bar-height:30px}.announcementBarClose_gvF7,.announcementBarPlaceholder_vyr4{flex-basis:50px}.searchBox_ZlJk{padding:var(--ifm-navbar-item-padding-vertical) var(--ifm-navbar-item-padding-horizontal)}.lastUpdated_vwxv{text-align:right}.tocMobile_ITEo{display:none}.docItemCol_VOVn{max-width:75%!important}.collapseSidebarButton_PEFL{border:1px solid 
var(--ifm-toc-border-color);border-radius:0;bottom:0;display:block!important;height:40px;position:sticky}.collapseSidebarButtonIcon_kv0_{margin-top:4px;transform:rotate(180deg)}.expandButtonIcon_BlDH,[dir=rtl] .collapseSidebarButtonIcon_kv0_{transform:rotate(0)}.collapseSidebarButton_PEFL:focus,.collapseSidebarButton_PEFL:hover,.expandButton_m80_:focus,.expandButton_m80_:hover{background-color:var(--docusaurus-collapse-button-bg-hover)}.menuHtmlItem_M9Kj{padding:var(--ifm-menu-link-padding-vertical) var(--ifm-menu-link-padding-horizontal)}.menu_SIkG{flex-grow:1;padding:.5rem}@supports (scrollbar-gutter:stable){.menu_SIkG{padding:.5rem 0 .5rem .5rem;scrollbar-gutter:stable}}.menuWithAnnouncementBar_GW3s{margin-bottom:var(--docusaurus-announcement-bar-height)}.sidebar_njMd{display:flex;flex-direction:column;height:100%;padding-top:var(--ifm-navbar-height);width:var(--doc-sidebar-width)}.sidebarWithHideableNavbar_wUlq{padding-top:0}.sidebarHidden_VK0M{opacity:0;visibility:hidden}.sidebarLogo_isFc{align-items:center;color:inherit!important;display:flex!important;margin:0 var(--ifm-navbar-padding-horizontal);max-height:var(--ifm-navbar-height);min-height:var(--ifm-navbar-height);text-decoration:none!important}.sidebarLogo_isFc img{height:2rem;margin-right:.5rem}.expandButton_m80_{align-items:center;display:flex;height:100%;justify-content:center;position:absolute;right:0;top:0;transition:background-color var(--ifm-transition-fast) ease;width:100%}[dir=rtl] .expandButtonIcon_BlDH{transform:rotate(180deg)}.docSidebarContainer_b6E3{border-right:1px solid var(--ifm-toc-border-color);-webkit-clip-path:inset(0);clip-path:inset(0);display:block;margin-top:calc(var(--ifm-navbar-height)*-1);transition:width var(--ifm-transition-fast) 
ease;width:var(--doc-sidebar-width);will-change:width}.docSidebarContainerHidden_b3ry{cursor:pointer;width:var(--doc-sidebar-hidden-width)}.sidebarViewport_Xe31{height:100%;max-height:100vh;position:sticky;top:0}.docMainContainer_gTbr{flex-grow:1;max-width:calc(100% - var(--doc-sidebar-width))}.docMainContainerEnhanced_Uz_u{max-width:calc(100% - var(--doc-sidebar-hidden-width))}.docItemWrapperEnhanced_czyv{max-width:calc(var(--ifm-container-width) + var(--doc-sidebar-width))!important}}@media (min-width:1440px){.container{max-width:var(--ifm-container-width-xl)}}@media screen and (max-width:1290px){.navbar__items.navbar__items--right .navbar__item.dropdown,.navbar__items.navbar__items--right .navbar__item.navbar__link{padding:23px 18px}.navbar__title{font-size:2.8rem;margin-top:-.3rem;padding:8px 1rem 8px 0}.navbar__logo{margin-top:.5rem;width:1.7rem}.navbar__brand{margin-left:0;margin-top:.4rem}}@media screen and (max-width:1024px){.navbar__toggle{margin-top:.4rem}}@media (max-width:996px){.col{--ifm-col-width:100%;flex-basis:var(--ifm-col-width);margin-left:0}.footer{--ifm-footer-padding-horizontal:0}.colorModeToggle_DEke,.footer__link-separator,.navbar__item,.tableOfContents_bqdL{display:none}.footer__col{margin-bottom:calc(var(--ifm-spacing-vertical)*3)}.footer__link-item{display:block}.hero{padding-left:0;padding-right:0}.navbar>.container,.navbar>.container-fluid{padding:0}.navbar__toggle{display:inherit}.navbar__search-input{width:9rem}.pills--block,.tabs--block{flex-direction:column}.searchBox_ZlJk{position:absolute;right:var(--ifm-navbar-padding-horizontal)}.docItemContainer_F8PC{padding:0 .3rem}}@media only screen and (max-width:996px){.searchQueryColumn_RTkw,.searchResultsColumn_JPFH{max-width:60%!important}.searchLogoColumn_rJIA,.searchVersionColumn_ypXd{max-width:40%!important}.searchLogoColumn_rJIA{padding-left:0!important}}@media screen and (max-width:996px){.description_meEo,.navbar-sidebar .navbar-sidebar__items 
.menu__link.fastkafka-home,.navbar__item.github-stars{display:none}.accordion__item .accordion__button{font-size:1.1rem;padding:1rem}.accordion__item .accordion__panel{font-size:1.1rem;padding:1rem 1rem .3rem}.footer.footer--dark .footer__col{border:none;height:auto;margin:1rem auto}ul.menu__list li{margin-bottom:10px}.navbar__items.navbar__items--right .navbar__item.navbar__link.fastkafka-home-mobile{border-right:none;display:block;margin-right:11rem;margin-top:7px;padding:0}.navbar__items.navbar__items--right .navbar__item.navbar__link.fastkafka-home-mobile img{height:auto;width:30px}.title__ymQ,.title_bFDR,.title_e8_4,.title_iMVk,.title_lvu5{font-size:2rem}.testimonialAnchor_iYyG{margin:2rem 1rem .5rem}.testimonialWrapper_gvoa{padding:0 var(--ifm-spacing-horizontal)}.title_bFDR+p{margin-bottom:1rem}.heroBanner_qdFl{padding:2rem}.descriptionMobile_CZcP{display:block;font-size:1.3rem;font-style:italic;line-height:1.8rem;margin-bottom:0}}@media (max-width:768px){.DocSearch-Button-Keys,.DocSearch-Button-Placeholder,.DocSearch-Commands,.DocSearch-Hit-Tree{display:none}:root{--docsearch-spacing:10px;--docsearch-footer-height:40px}.DocSearch-Dropdown{height:100%;max-height:calc(var(--docsearch-vh,1vh)*100 - var(--docsearch-searchbox-height) - var(--docsearch-spacing) - 
var(--docsearch-footer-height))}.DocSearch-Container{height:100vh;height:-webkit-fill-available;height:calc(var(--docsearch-vh,1vh)*100);position:absolute}.DocSearch-Footer{border-radius:0;bottom:0;position:absolute}.DocSearch-Hit-content-wrapper{display:flex;position:relative;width:80%}.DocSearch-Modal{border-radius:0;box-shadow:none;height:100vh;height:-webkit-fill-available;height:calc(var(--docsearch-vh,1vh)*100);margin:0;max-width:100%;width:100%}.DocSearch-Cancel{-webkit-appearance:none;appearance:none;background:none;border:0;color:var(--docsearch-highlight-color);cursor:pointer;display:inline-block;flex:none;font:inherit;font-size:1em;font-weight:500;margin-left:var(--docsearch-spacing);outline:0;overflow:hidden;padding:0;-webkit-user-select:none;user-select:none;white-space:nowrap}}@media screen and (max-width:768px){.navbar__items.navbar__items--right .navbar__item.navbar__link.fastkafka-home-mobile{margin-right:3.5rem}.navbar__items.navbar__items--right .navbar__item.navbar__link.fastkafka-home-mobile img{width:33px}}@media (max-width:576px){.markdown h1:first-child{--ifm-h1-font-size:2rem}.markdown>h2{--ifm-h2-font-size:1.5rem}.markdown>h3{--ifm-h3-font-size:1.25rem}}@media screen and (max-width:576px){.searchQueryColumn_RTkw{max-width:100%!important}.searchVersionColumn_ypXd{max-width:100%!important;padding-left:var(--ifm-spacing-horizontal)!important}}@media (hover:hover){.backToTopButton_sjWU:hover{background-color:var(--ifm-color-emphasis-300)}}@media (pointer:fine){.thin-scrollbar{scrollbar-width:thin}.thin-scrollbar::-webkit-scrollbar{height:var(--ifm-scrollbar-size);width:var(--ifm-scrollbar-size)}.thin-scrollbar::-webkit-scrollbar-track{background:var(--ifm-scrollbar-track-background-color);border-radius:10px}.thin-scrollbar::-webkit-scrollbar-thumb{background:var(--ifm-scrollbar-thumb-background-color);border-radius:10px}.thin-scrollbar::-webkit-scrollbar-thumb:hover{background:var(--ifm-scrollbar-thumb-hover-background-color)}}@media 
(prefers-reduced-motion:reduce){:root{--ifm-transition-fast:0ms;--ifm-transition-slow:0ms}}@media screen and (prefers-reduced-motion:reduce){.DocSearch-Reset{stroke-width:var(--docsearch-icon-stroke-width);animation:none;-webkit-appearance:none;appearance:none;background:none;border:0;border-radius:50%;color:var(--docsearch-icon-color);cursor:pointer;right:0}.DocSearch-Hit--deleting,.DocSearch-Hit--favoriting{transition:none}.DocSearch-Hit-action-button:focus,.DocSearch-Hit-action-button:hover{background:#0003;transition:none}}@media print{.announcementBar_mb4j,.footer,.menu,.navbar,.pagination-nav,.table-of-contents,.tocMobile_ITEo{display:none}.tabs{page-break-inside:avoid}.codeBlockLines_e6Vv{white-space:pre-wrap}} \ No newline at end of file diff --git a/assets/fonts/Panton-SemiBold-8ca10ba7a8f4dfc517918497d4738352.woff b/assets/fonts/Panton-SemiBold-8ca10ba7a8f4dfc517918497d4738352.woff new file mode 100644 index 0000000000000000000000000000000000000000..2812f88969b792f60a480935f3abdc33530dbc49 GIT binary patch literal 45248 zcmZsCV{C5S^Y&fawr$(CZQHhO+dZ{y_tbVzZM!{n|L1x0`}oevm7STnW@gRW$*g22 z+e1NI8~_CPCk+Dtgntj9a>@U+|DTA9tIGZ3mHzW#{{yajD1D`bsF*ka5c2v@|K|f} zBLpCkN+>F*007|?007lH0N^FqV{pkTp`s=X0EGU_4QvPifTrIK0j(>jFfjisIR7tB z{vQlYLtKkZ?2H@%0PvQ7yg>i}WM>HXqSM02<)1C&?jHy8Km7avKrC#%%mDznL;%26 z9smTQXykYQ+tSR)^j|*Bf1H1L0qh_(GD?>JjQ?!0|8$~%AcfEc-L$lG_57Eg@ZUa? 
z|D~f9c#!9`bujtIgZgI&#rzM7z)AFWMxOub0yX@LBlrhGU{!#py^)>Szn=E~#g762 zU`wOO??jFcF0KH8@4tQ_{+$Kf%xdG(v*)IXp{b!Ez*r_><@^4V-2kg2%K#vTNsr2C z5V-K;_RZ43%3J{C%!QdLSPT%bhxym-|6=qXZxKL1riOmFf8eyRu>y(51EX*N1zq6$ z|2FYI&Vf0lnV|&$h@8+IR2Uo_g0TPyfKUOL`+t6OWus_ALo-9e3*bdq*d4@druf0g z1=Mj1LyJHV#1y38PA<^S=8lGnuQMZ0N4o^!DAP8WTBw+qWg$nInwjxHRAAudfW2&( zr~m53so7Yjn_>3de7y0_=r-r|&(wAfx2N!Mwx4hIIpBnGS%NodvI=Kd1%=FawqN@Dwy$QC`0MJpl1PRvM>#4| zmc|$TS*K1le<2hp6G&9P5KFX|D1OYBT8*#tYK}R<`JZG7c$q{0y^!DG2}8FNT<(p> z<^x~hnEw?qU*VAaCrbMOv|nN&%qsp+a)Iwi?acYtQ9@LT?k!0|; z#{L%KVrPD_h~_L$Oep}**M*%c&q0@DX&h}(Iqpsu|2pBW*2`A`y~+$>PE4s5O>)o2 zb|lPDH0{vIwTt{F4#}otZT3F)s!VPzNkV|`P&;O82pVt9`VEA}OQP8~2Ke2Fi~bI7 zTeFs}V%W=e-u6H(b;*Ox8*DEVfO8T^Jeo0&DKZPRt!FEwtnuYH&gWadEM~PYaF)i4gW~!%)xzx>PSxeSpZczRmO!XUE-)6aM?azcF zgOmevz@(Asc-vk$D!9fS$iXX~Ri;t=8blDT1s&+u7Qys8XHGi!xOxPxQ~d2g56|fP zVubHktP6O|0?(kF!Lax+F1Oqb%w@q!{q`VShGD$U^?ULoMAopweO&@)zF^fKf|~%i zF#Am3(A)Ue%{AfPQM1%r@7O-3YrTEHXjr{Du3>0mflQ;Bs7vl4+j_!?=LB!*=e}H1 zxR%Y8K{lhA{9ii{T>jaYCP%+O6KgdltSFlTW=2V}P7jx&)yq2&c8fBylsBxI|#?3MFLL zd#R%s6z9V`?l$Z8_cFNH7wDJP%NL673~wj7x@yLm=~Wf_wO;wC)j?$45K3>M90|P) z0lie<)#PMsT+$zk6-S?u<&(tEG_REU|Mm)Hw>7_M7(1t5oi4ykVR>58dg;?xN5QUW zmpE%iJ~3pvj$OR@@Y^Z>oC^P*4+q*zeEkz%>bKyyYga(KwKjo2wwq572M3JHRgn@Rmi{k=MW-xWB#SPiSdXJt9h; zAM3=hT0xhwC2A{eC12F6hJ+Kj%Cn>^QEt4IgVZKLiXy{jvaBVuwmnS4Qq|wvxm*w^ z>@i8clzoWh@LTZUW3k9#IsMD!;x8~DVd9W*hU8W1vDol;ij41+DVE%KHX+osttN%| zKGt14;IQs~I^(Q+A0wblfTn}#Rl8Lkin@X}ts;4uFs&@TQq8&=y3)pcz}80Fym(_c z*K(FMW+o)vNKA{a7PC4o%@C}%Zk3W|Ec?&fx6bfmOC_gp>yhcKvA=Wp zKe_)Z`3^+*imGHU(sqmM? 
z)D_jamhkYMNg?;1$q~FK%L=@+1O~wphYi9Kr4@CZgtG)El_t$%_)buYvxGOsn($(o zG6&K25$8n78Z)b8PO|oybMG>SWK9!CY`lNrtdENX`>pHh)R)A=?=vNkm`8VIiqX9x#N^{vL+rn=0soK?n*>Fh=ESVetVyMn zOl7hQ^ecFM;@>^~Pe}i_)B)juBOrO88emXhLSRMUK;RzW8xTkkQV@O+b&z0?3XmI6 zPEbeCD$oNkXfSQCRInkiJ8*Pxad1cQ5ePyERftH40f-Aod`MZySjZvB9VkdBaVSfu zCa4!^d}wKCf9M_=TUuNbHptQg)HeVE{wq?o#xk(kAp^O!GK_*j})L)g67 z(>R1UmN;#=kht2oZFsr~iOmQ+nt zchvOM?$muWh&0MHnKYZUxU|-^eRQyNf^@}nSMANK_hA&)vwB+oq0H!n7?CT|<>Cm%0gBHsZ&GQTXpKYtVdfdHm}q=3Ib zjlhv0rl6x>ix9k!qEL%4Kv-BfQFufIAR;IdDl#dGEov*;A_gj^EtW2JCQdFMDn2Q} zC=o94Dyc2GEX6O?B#kbuD_tdhB*Q4Q8sphCQt`4fMsP3$usXm~-r~a-% zs3EFhsgbVHpmC)suIa5=rMaU8&=S^i*V@u1*EZMg)Be<9*74P8(%IAn)}_}q*NxZh z(gW8M)N|FV(A(0-)tA?g*YD7OH{dq#H0Uz;G-NbXG>kVKF(Ni{G^#NMHs&{WHtsRL zGC?=tHL*7-HkmX9H#Ia(HT`Y+Vn%IdZkB1bX%22KY#wahVS#SpZ}DVlXt``9Z?$ZV zZf$BEXq{`_W<76xVS{NSZsTlIYI9`^uoblJvE8!$w8OR&wsW_uvpcYdwwJW`wJ)`w zbbxhGb%=BraCmaWb<}i>b-Z$-cXD>RaAtK*bzXIWc8PSQbj@{Rb?b3wcGq?fcCT_@ zcK`OE^icHh^l0!n^2GF1^tAPi_AK?>@}l)p_p0)`^QQN<_0IO*^1=5J_p$Y<^Lg>5 z_BHlR_wDoj^kekX{5On8{9*kC{oVbm{1*cd14REV@j%1C_`s^b z*}%IX)F9d*!=MV!&5>RRPuw!*18H&P$L7bq%vIt~%$d1Ce+QLxa$%?`-pvj8(6Ecx*&jndN?HIMYAAY+61|419 z>E^S!TrQir#U!rHuH438PSO+?wA^}?E2wuE5IM(Xj^n$Q?O100vNe04WQ0vMkUQv{ zX5g8}NL=!ybwYDD3;ihOc(}rA+%IuA@&_WE@e1Y5teaB3fGReuoPRM)V(1BQ0%}2&*US6BPwYAVh2k&``3=F*YrA> zwd$ooCQ*P8xyTt9oP05uYlz?z#V3M1TGY4bQZ!-d_i_aB2pR7$3%NQowJFfa0c>Mi>Q`T@Za{G{<44|7{?v_afkDy*ru7BO+&f7aKL;9*t z2BmkyjneCnHga^Z6ckbAUW)Sn(A|VD7Eg{JRSmJxXz9;2`O<6)EX+)hryy3@M00+k z50-Mw*BKgY_ZWf0&py~4xY>&{CL4EnrLY)^{mgcRpxB($7U_7QC#TL<(BOXv&!N;` zYUW93l);>L?bmUHsc8qRYD0TqRdnM6=@FXRLW4)|_ix|~&qVcOSOsGW?hd=5&-J>| ze+oE}DJVzhq~(PXO9drhZItP%>2(&G+dqF@IDGmDnX!EapIzrLl{h#qKAordlQ5TD zk}4ZV;fOw1HaMSz8k(6YzQMhRP53R9|CP(u!nz9I6->nqV8 zjY@skiqW|niSK${$GFSpapHOEP+RMKl7-EBM0rV6)O#shbJ98S^Wmerywgys;{i8+m|DcI(iH8Hy&N2 z^Cy}3@QQEw>lW+3yc`9lIx64}>R`abrC7oA7M)YRBPXmLcRf0w zVwd!@1b%KGm#5|<-#!hVemO|uCVUJuUV^^(3Rwa_y)Z_S12v<^1tPu7DlEx9NN!NN!SK0fq5vsqC5NM>GO`FbL8JbFmMu9t$qY 
z&>_9`I)bJkd{vvS38CAI=G0fB4Y{1t$Q5OX4M~ft6WHBIp!&Kx9#RnVk|^{d2?J7@ z5f#uMjYeufq}~+?$xDj_%9j7jYH{h3{sE2-mpbus+PndOrd-e$3BE3h#7{ zC7zmN;VvXZM%!Gzg33^;eAIzzFO)D;7`KK17EN<9w>5-rX|PT)~f@nQOeVqi1p5pZ4+v3>lx$bPfvfKvs#Dc<@^&4_a6kpjy7>_ zhCF6NwrJt780z)Mh&<5t+MbKO$?M0{Zp$QRx6r@s?oGtYSll@79PKdVAfL|PTW+2R zd3Q`Po1eAIu+BjZqan`7%`t;<8q--U+`1alA?zt_KAG<3arbeXTQ^Vy61oRKISr!3=)I4uZ9 zx^ZTrD+EV*`C;QSTT{VIh@s@*qV~$PrqXEWIX$1w22-wMyN9GQZu9sYcCNA>%V&01 zf%}gAEw1ZIe0KWx?rPz|XWNZ$t`IC{Z*v$M%Mt4qz$nP}u&)=`tc{t9i%^j7D1arX zen`za&?~&D+D#_IrtUP47YJffb28&HN{5@O!G{EZh8$MXE{L5r#L$cVpi$X4#H@u> zHPMjy=YO@ct|*(`q3F(z#I#-Scnp?WVc@BJpJul~?X5pee5u&Au8@hbTja^GDxuF( zq{g@{7j>h{lhmBn@O#|9L|V)qnc;lf1ay0-D|GxK(^WiAvx+|Qjhn0oYagA4BDFD% z+S{zfgY0ib?1P6SScG;36t`iCL~^uiE?Fa*^9ZRHhhvlM(V-1X#N+yQrMPQA_COVR ze%EBdl#!rvkQc2>LPv3uyW%DGZ-ep2oVQEi(my}GMNTB?`#dQbHv0@(B))lFyEB;j zPx61_uh?7?75F13=q2*E@khB3#N8`rHBHeT?Dyb8Hh8+mwZ`+~nw$ahn~ClK3n5-Q zm~gV#cE5!IyOYu@B`@{HwDj>5Jq0JAw(_8$H>$;`K#B_4Ao&#DZt$Wm@MzMD zmP3bP(m66k_zNdsQ=Xf3z6J-DNQsji$a1g1i*rS<9$c9PDzO_5A)8tIkYwwO{Z0vf zBAHPLu4-K`oD#pGqkkVKEg~x+6it3v3;mmJZUtM;Od>v=&Ry`S>CM3khQo1r&gV}- zQA>E_`jluB3WwyfYk8*(&TAhsW(4P8v*Cr~jK;U90iCT)B#XPq=#nH3sHu|0w4$uN z3IZ65t}*t?Pa~C2xU@2Rc$!~PDs7WO#Fhq9wBRyoaG3rS94p^9l?FXs&hr|i^xPIe zpS$C#)ru+`?g#O=J0FLCEbx#*TXp37fWMX4I*f(|aWH+ao-PyM>voLWFlGw8hNr>8 zQb)^rRGApfF6a<;0xp+zZk=&D0!sS-%zArFs9Poz{fw-f>Gt;OuzvFMb0R51Nnbs2la_ zZHI}{NSmxHato6wiFrP{MJTij0#QYZPi;}fq|+I2vaSV_mgLxbecie3J>8W2?Xa{w zg{)0_WBKOw6Ozf1#3mZ%b8_YQO`yHjsd?*8W~J*X2A~rYX9jlFbAO!4_yVdw;;By8 z5BOSC8>ysW@O%oUSwW-x$#3?2x{WzC%!>H^G{gh~Rlalg!D!8L{F> zP<3cRAvJ$Q7gae;WE?%N7jV9Rx%gr>==In5r8xry!M@}5>;@sl{iFm7#$4{oNK6jA z%7%v_e7HkCvx8Pm;=Tk786-QrVymRK7TfytQH!aL61ZPF$}&u`{}slObo$pvqq&cm z#EN-VFvO4uGR6FErAcMLSgaq^H`vcCRtQtoBH=TAjq$f$5MRT`DO_svqL|ElYC+jh zX^U<58IhXaWhN73zKkvhDV?6!C+pf!b8g#(yWt@6g#5)dym8-Nt^4MExG0iuczU)!^{Ow)@32%-Q`4{%dEW;-oD+v4VIN0SkX+&~rX1ut%alSu|q zy1?pX0FnDfieJcVa*#_3g!*tPoWP@nA^nEYPZ|!+;ne~cvsQ1XI@M4L}RX15ZvWEE^7uHt4{Wkb{&N3A!GJkL|pCU@dXEpBXn-sd% 
z`EX!R=okiV_9HSx^M`ivJhqAopYI#fya_cZI+SqvCUT~!b%6ZYEf++AFO;viMh$4~ zk7>DWo*FKg|8;2h(*7>=K7;Ub-H-zDkC=Fq#0?p(#*pF9#r4uVU;IqsnV!0H6d$q{ zf!-z>-}M~d+Kx!>W|Q|x1F3osP~|t1?}hIj|!`*)Ut1_%AXsR~R#C0m#_tCZn-_2{k6!~R`B=Rk8!_6}GF6-9svmWou1jL0;>tC9Vk*N$ z1)kd3)Kl2tRn>N&oE~GTSn{jJ%+PXhI@>}Kve>b*X;9Tgb;c z^$3GHw>n?@Mlp*taQmL1dG9n>(CKr5@&{y14F8&CT!RaY+9C?h2ukE4jEPR#lSHK* zGy5SHSjrIXLkfxDV7ZMDGkn0ES2?Ea5~7XR_RNhW4*(=rU4gBMN2vauad*dcKvfY) zC+X@5B7&(gdnydLw}4@bjSI|&-WPhc?bDh8so09AMQL8eD{>kSLTLF!q+D9Slz!{? zQmT8$b$gMtMEuRQ66=#qCXbQb*66#gG$^##)VG=HIyV+ywUrPf$u> zh8m=BL>$0k6M=~y;De7~?yq&RCiBD$iaG)s8Rb?pPusj_Pis_ys1yTPfFM@tlgpJz zFm6}V$$CIgm!!+7uwJ~u?BQySvfHEjE0 z;iNEi9ND3aOMJ)l5@jlGTKNT5TpTTtj5;LG^7V?+auuwp(MAc)&k62`22!r559Edn zHTFF06tmaS*CCg4B_zdYzYp2s%(Wiq7Sk}!WEkNrp(>o3owYE>_N_;KABsvQS$K){ z2UPP*7}Q7hI75Vrdwq>qENUvf=+r|r8~quE%CcV30>7jPOJs0PA(9n$D#%Z(Jdt4v zdBhjDF{w#-QR%ucNU(bk;Q*-HuVpdb&mg@~uB+X|MItQlA|@)Y)}RQg9|c0NKM{TI zDTWm+jzU`?5-7+74H0#ruwL~UV47%yOIujAzaE0dti&qxy{V*ZkJ3>Mr{PpwTRL(d ztF@>x)L+*Blsq(y08{!`CdXS{&*X1l`##RH)MsV-HbVpKPW1c?R22{e6SOONF#+HJ zB^G2QAzD`_NxJclr+7d){-c)pzDrAAe2Q3Pu81xYLWCdEUq-VKUJI>tt|3IjEn37( z!7ahCJ(bCRMkIvJcX-vTxYlIq+sx)AlqcHcs zJGw$07mS-fO`S2|5YL(P#~D&km>B6QzjBw{}N-X^s$%Rn)&3M`u(J}9v^ z6UKm&ajAQ)#@pr@m16F$&?msBKylkY^)(Sm-wlqT|Gz>edJ>{+2-CrFG~dL-~|x^G2;^&G@z)Oc%z;u2Z<;YOmA)=P1KkPfs z6SG!P%P*P3yMT?Pw-%%BiRU#R=YnxU#Sg*(oJ94GRvc@HJH$0rJ~H=5P}^FNzhjxN zl_G$Z%w8Duo4v#Myr*FB8ZDD9$sVcS)}~^AFMiqrP(R|e_9APR|80^BM}I)0_k%~f<$NGx)M=9$$Lko_&eJFM z))#(EzN26P_V3AJnP`H?bzRakk4GKB2H2{6{yd+T^WV@3!`TBi8*~DL7W0{60@l`6 zwta|`88S?L8h$iK$uv3&+vY~7DTtg{&&R5wxcXfC?7AQxM@nSlSdnLpkN|XEkK>s9 z45W?`G5U`@A2ARy8inq^)s7i^&GssMGMi*2(w*3Y+AS#Y9j$PTBwvxwB3pz)4W0qp zv?xo6+~S(Pu`6uefc+n|+q(xE1Ld2|V{jD9NkaT(M**BiRJ~lUUv^xM7(}oT+{D3c z6C$Bu$1z^&?9rRy-vd?_%OBhbBX2a0(-*D*X>hv(wvH4DAwDd2}l#S6r(X26Q{Hg-nx0|lO zV5VTSiHUanfO{5ah=8IO0M9_?OgeZ>l!DR^nJk$=zdCTXDJ(swtbBmR0DlUV$x>LA zPe~P!TDcU-G3PrRNC9u6;&u*GZtU?kDVw#2tR>cKd#&lJR8aV?m@CpQUTCF`p&o#7 
zYFI4l&i{si6y2N#%)%09FM;{BSrv$kh%lS=OrG6$REmjDLaJH4>Y6@u@RiZXrgqNL zUq;CCerl6i;DF_9P-C7d79%wrB?`5nfXgda*|IB|*HO=-gsGVy|GitXf_=(}fAb#Z zrRc&9Eciau0z`6w1!AcAgp58+20mep7~o}ZR$Ax1?(mvBg$_&vzI{;8k_p8~8aO8= z)BaLV=5i|UY#AY5@Oe=T4rBD({mRKWBJc{O0NZ!C9a9);8yn9FV z<11Vgwu-(+LNoE7VF%h3Z>iZLXJk!8dxA$F_BeKE*4#;W4JtSt@LKSw;M)zdEpLn~ z1la_%M&%o@;(9wM^!Y~!?4kHqR_&2NZe}YQ7*v4AX>zP{iE7b3DV~~s35!#nWZkXB z0N4&VwFLM%uvGnl@y_`P0B#UhH< z;|LW%*Hdp3ao$w|=Na}BVv=CbQ)i>g-QX6}sasdL{QD%P7(-Yu=I|b-=NF)NP~lGh z?W&A%;4J$(g?2MgT(361vJNa5#N>$hM_RI?@NgSbjWKCmc!4lQ+najTb23^h$Vt0a z&hG*u-$`pQHrxtGa{}-R?B&0dso=pQWYeX{y9svNgpV~Xr5MjC4rS0FB)+wXz%`PK0_kh ztJvY<`&=UweRS;jvnydtcgWtUUH>T91{_s*6$PvQ1e6#s2Qwn${5rhFC^L@7Eca(^ zmB^Vn((?$dB^*0ZjhJ76nKmsrD>R8VC%kqbdR@p(aHKJcRwQn`Xwil)VV21LcVBv< z7IkXr$wL$QYaVUHCi!z)xG7ezdMe&J4?>`q&*=_PiGg)+KdBU$c_kGuBh{j5`aDBg zhE+e_dBHk(XlGvDxPGPTr(BZbzRhV<=olB~c3rI|Z&IjOi;@;)9f3*6Gd8|2FA;hNQ}&3r^2RX>Coa^+^I zpGl7E?x4j^hW~|%fI)D)ALColYqT?HZxA0a%R8!RxfUJ62npAw(Ls}wq=xfuuFTV zeZ>*BYQA6Ula{61quXgNBx#ozv0MA*o+VP)`gVVFz)JKL=1VEN{nF}w&8WG^g)iz$ z3;_2-h!^+#(X&>&#@!}tgsma-&t8bPR*1{qJWMvrn6P5!F%U%f6#rCpUxaVoNO~G* z#85N&A)e2JFKNCUQlirqj;&+GpGKsYG%_cg76Ga z*N#mUlJwkZ;gG*=&ZJcO)>EYK_VsK?W=Y;xO*4RPz*%(zEX%B_OYB&?>{_obA5 z2l))&(xyPy+T$YxU%a6spWfwt+wK>#OpR66yi?J(bX4n#n;Up%Lt!L4^E7$y3OXy6 zmz#Y?U9e+_g4&kNr_eFP-FD^!)+^Zr4HSjfHc;Z&^>ve7L z5n5IMr6XHchV-1V~D=R)nZ`~_G;?;qW#!-DYS}B zewW{Ewe1M>{2i7HgIr>RRRFpyE|J1?b{6r)6@ArZcz@@QHoJBQkdZ+$8F>15P+vFS z96`61r6-Zn{VidFcqo#>S)r{AroQ@A3(I|#d~?nN{_yq(RyYFenT#NsL9dZ!Zf%MZ zOXij}%8}JlAcR>R?xNFNvWRrwm30E>XB0J*^gI{KiR9vM@mH(@tZnrn1=cg*E=7N& zgm~0?5+UM^2NHQo#0?+B{_Y4g7|Vank=xV8VwpYllC>3s!}@!S zY<-A62=WUN&4j41V`Hd_eGoZFD@hm_q$kQ~G(8;fwZHJ;Jla(Sj*Oq>b;+Bn-0_bX z5AH!BJt0S;-7%g{9RiD6#%W;vSB}oHEQr=@Jkz|N6a-Q-tWwdXNy|?%7#&csLY&wp zj-)K(PlUapjA@1g{ngd|Wp0j!9!zG8?&J5#7KrsFbB(y zQb%RVk;@BT7)(4idP`gynval-&-a$issfJmwi5j5;-ob3%!C8z#5hx z-D5%s3YS^70O~>ZJ`czq41u{HCSbVAOV2wVy+WgEVzw^juZMqIzaTKFPh3 z+>HW>Q_#6$vkFFOQM2&iA&&g)%z4D5*{OowlvvXv5{;k0=Fy?f$uHE;!(AK)9Np11 
zy;r-pWYtX#b4Gy@nw92X&84v9q?^u2Z{$Rfx4wj35i}(4!*HJ=KVu3S)SWOItuUQ5 z=TcANSL~C6%jAp72aahAMXP03x}gzc*s?O|4iQ7g34=S-`E4Em>u za-rj6kT&!Rs*>g99Z(8}@Qi_7CX$YY5)EU{eUh-4I_cttoc1cmkSW$%Ulv!IjVrL@ zhl&(a+NSjYaR~UK-#%evwnG=ga{mr{#H51N2@3)mUNAV|O8^A)14hMjKu5X<0X;ha(@?PH{m>>iag@#v+D9~jHJwHPiD3UT5~ z=b7Q8vBkx)GFTA_Af} zz9K;EaITP1Bk3lcSi*`F2Blgm7#toH<{~wO`y@*GFoidBR}~c9mUiMD07^2cCxrV< zGfNQY(~Mc-aR;00h{Ew&`FbR$Nsrzt7X!wDnnc|iBQ=wDD>93j zb3;_wa(!e`52vbL!+B2}8hw-I$u`X=iWB2OFpt{wi*_a`&o-XSlod~F#9H2`BQS?O zj{BYD^}Hs8ubD9o6z3(nn~x6dkf!K55`qrJYLPIqLeMZSL#1U(IsS7qj-7@RaSC2> z%2w()&_f6;ld}AkD~Kn}2uAAd#okcNNVKz-9QMfd0931mI0|}mn${+C^H&%ikg~qR z&^%bawx{AXIdab>%|GzXesUY-<1g+Un4MIxmE~3*#RPWq63}qSAK#%-Gl#HvS8)w8&}fTdBx0sg}CZb)+PX$xPESjsAr+;fuC3&|4OU z40GwcMX*fPxe*YS-&r^^I+q#M?pIBjX@g=evw6~<0{rXQq+sw3)6U`Y_wJwN2|lRB zL1U-PT=ba7?9fh@n?EX2jEIWX=aF%o>fPLAvjT-&>n1=yZK4Lw9aykdML#G-`rJw} zKmB1r(8qL9T7beNQ)tsY4HuxD5CY5xnpDKX{N889vTj!pEV8Z(3&}{p1O+=*4}%bp zz@Hn8t%K>I-_kNkj;>pl#~qOj4=XVU$2eWEa}@DZLhxClOf2G$c-Y@yCQTES4~uffx}8vW5f~la%=7K#t!>Qc9jq!jFw(zruA@ zQ_%|77SF^sa=>Rsqn?VUCqUzbLXLfX^Q#@h4>&2fabyD&xHjmUM=NpVKrTLIA#}Mo zSgH!~Rm%SW=@Va#6NK_Dk5*5Oo=YtIxB_>d}tAB6gh`b)b=mM@#SgrK9tD?Zy%P<0tGM^SD)L6ph zbu?!Ss)@xkitVjk!`J$wVtpVS+t~crHGD~~mbnfGCjyhvJ$VU^PT!7XFhAT&LU6+m zOub2UvIQjxSCu8WjT=Y<|Ca4{Ct*j_XYih^3FAhHFLwSbcUHG?r$kD^RayIpL*3`V z!u9EaHfgU56IRkZtvq*87co!UmKA^Xb&f~| z)xD5JOL)%asoJ@gG)6T6w)JDmOL`Nfbp(-J$FCKu%#qtpb8#CYS46$oM8m z4u?S(Y0g%=LYMb&p7xk7jllJ=r>uYM_OZA!X5#$s`Nk{L+n-*imGR$PI2^Uvk*V=* zFLkr&@K3AD>pkVaaIlqtB_yl=)(Cj`gzph{8=>+=i@%<G=nqS=*drN-^pZLW z`XgNym^x(((!z+Fq8v&0w5v`oK6-XGHgsIJ=KYpE1} zT1m*QOUv}E(P7Z$ceU6LZQ{YC+tHoZ*%MIy(%LyCZ@cFGVL6@{6tu8Yv+x>`#T?)x z2?^0jZ-u~iN5gmoS4P#1lZ%T}S>VRb*5T#~NwR?Wd&`h3HPo6qg?vuC>ddK`JG|i3 z#$UC3R;RJ$I+LazPCvI-sNG1c@-k)@>mu+ccaVo|o3rWY;wcr;H-CjUE`jhX^_O?~ zlb5L-Z`Fh6B9PbTtS%PrNHsc^vqw-7qDMce54Qwt83K_(z2slTV-U16*@M$gARjsA z>fBsoP?lf3IUp^Vw)KK|b^O)k)C`Dridlb1eTtYl+`w<|e2!;=DSLBJ7BwV|TElK{ zPwA?`ls6JV6hpwvCr>vY?=^#X1N#I}0(__`rW2c$&ekZOAQm`|72(&R@QSC~8*~{Y 
zThWx86{F4OfRZe%KAy*U*U{%hy|A*uU|C_#w7`?g{F9Sog=6!A=5~d!4-?VCPaDMX zG^tc{Z<6R(|I$HBKrh0h+Lp`TRc&0Davjo+gPD25Owp`xDid;l+a$6-jZ`PnlzbA# z34<1EmLS#CCR4Hakzu?-S4vjzj9{$`Cv*g+n2MuvDfq6Eq5B_*mP9qwAj*;7ms2fW zJedSU%qg_s=KK0;Q4ym4c7LtQRA?gO|B{Lk*_JFll-Tu8hXo6IT<53g3s+ZHk+oQh zzP5s8z>HZF#hRv;XeZ?cu`k5L5YeDr|6YBWIlZA>YZs?not=-Xr4a>Z{A|Qp@r!uh z`-z&foRn=;V+r{F-PCH6q31aJE~S(G$4a_1%aJHV8MrESyB066)<=hjN5?4+J9YPg zclnfr%f3FgQ^jDwL-eYe5_$Vzde0U|RbxkHE`0C;%3WO$@A$74i^-F=KI~$oaIuTq!jdscAzUC zYqZNi&V#NYWs}4l>W=9%43r3F0LrpX;djM&v(T}^!IFnNY~2D%1N?lThS;7DQ}B7_ z%i?y^L9JeUbRlFrCI6IXe9|`;+=C&_`DH8HuS0CbdMg9&9#I|E*@zf>0 zqA*0DeCFsID@{+|du#=+)-hnE{bq=T1Y(>6KkJ($Yo79*=i^CHZ{mH%kPBEXsi^X| zg)1y+E5^W2BLE*%1UCs{RJpeeJYg|oMVmdl%o-CsXC3IFd)XeP`0rZE@&0LmW;D}> zH2nG#wyhElV?lS1t|5Q!zF}Sl&`9j*MbcFuvbH|T=2)5RN2Wr$%0;}nf|vBh=GM7F zXq`_D=oDN=`fjZ;9o>)eOpJtWGmb?fg6rXv3@PPcut;1=1U5anzUcdCb=zrt@0k~t z1&NAJ-egIgpXRQ*Q{m0Zk?&t>glW@_)~nZvHowD@D6vEC^u3-_SYGF?#QcI{NJd-_ zxEmaf3sa~W9k6Wl8eYGX%Bx25DuN??NENzd+DgW}G!$!|aD_rGdwqQUU>`99sgRYc>mGchYAA zm?FAzywyf%X{12_v7q1k8-tFe=%jmSOm?o%GytT~0b&lQC-SPc^x_LM`8jO4K}mwS zWr=BC(JKN9%$Zqa2AC&tp-fUwIPwsQs~?{G8avlGQ8jw^F4JT;7F9y!#y0$mr?G%Y zr>Uh9vp<&JHg9uo)?WzVpYaWXd?hUUKOiAk(%~#~(S*7c(Zt*GP{B5g;KLxedRl+x z`RwBm>*t%0ztKiJxlE);Id!MZ#cH%AY|#2Fs-_B+5>%KFai63c1EW4%G7x0De3bDq z>(dLh4L~lB*pS>bpU>J&M;>S2C_VmFWQY#QzjE!N2vKTMwbUrx02Mw&7QUXv!mzOAV z*e$(tRxlk2;y`#va9L{-${jF{5?!d2 z9f$N=xU##7WWqXdcWg9xk~oxxdj*w?1sTwE`nlUr0!~%&r@|I3G@UyQnW3g;bR8JV zac85|*!XXBW0wlsrm`BO%^3{ebt<+Uf}jOROnw>M@w{mvX5cJcWS1G=Vq{I_R=w~# z(bT!9BFB`=Rl9-~uRtyXPZIj8M@4uC#jGV5gr7qmF-M7-<(_`cYqo3LsPZ?C!AN6i$bav!Wl zvleUT`GE-DGw?lKawE&pnFS`4MxCyW4t#&2PU4^HV+JKrQ4_w5GtgRS3t$1rW3)hT zUA0Ioh_h)VhECASf5*_eo5)wE@|;2t(Njr{vNk=H)(_#17 zTf3EfnN1X4GF2oW^)iK8KOdZy-(zePp!fOLxa6oAlP<;EPP|dHW;&=Qjf4alOO0^C z$7KXT!YTY|+ZwG))sKUqbUUx?6n{LbCt|fWg%xgXAZ4Z~l*sUzN4rA~WZF-u;p-32Y8ET@Vr zV`UmsE2NT@SIRI)I}HcnyJ>&wya1YAV9Uq;U$-0Je@G!eOInC1J`M!jyq} zOh-5l_1JkFvnx5>Kfbc6)^=LSYSzsD&%XNvcH*yU5#Iy7G--=Yx2T@+>kKT{D6AeE 
zfA~!qw9vz)-TFdW+kr;rIIXn&`-(gA1m5XK;8hYykOVZSog&`k6w&}Qt=Sye&rZ^* zzb_AyH@SZU?mJfVZP^DmQa{06Sjt4toiTtF;C=DFJhz(mylct#rVz)Tt!K)F6DLH+ z>(vSG8dwz^(qW=gWWx}g(-2AZ$tlSPHUAH0K$*YTk-MT`9M?DK%(!A+f@eiLR+h8E z^v4N)mPr$>h^;Rh2hAIk-7KR@KK0k5xJELl3R{k;5)$N#=6n5L_>-ihvM(o!A50RI z$|d8WIELdLkoR|Fu*=R0!82nbU|jm$kzfy*@<3`7r$|Oss<)%5GAd7$+GCP4H?<~O zY**xkN4-|_zwIom{#%loq>R@Cz(iT0;tafi7t4M0_#FX8Z+DRF)gt4H;qX1E+g4y} zO}*;Zm~A9)6mkYmaBw71K=88P$2^!5k1y~v$?lXmukzVASXAZJdE@0-joZ!uEX|Z> z^>vmW2Wd%GIU61jV?RfAOsFvHms3r;m@Vax zy%}x(X5uzvO+^y%)y$Zr<`~k@_fx=pVgWRvSaS4C62=T9YnSJt*Lmu*_1H00pVMnK zVndngcP0Rr3|q|FycySwMPUrHYw_hrwIo&J=#T3Wyw`)#ROGNcF<`dH5p;7CAsGvc z<;vLG5DppV6jkJk5>3se0)&{F1@hg5RH~%(pcj+-T@X0--*H0 zm)H2B;)1Z*!#Vk4zb`Dixz2dR%gbVGeS4K8wbpgih%ctb+Yn=)BiVy(*}~M@sZeSv z+~;W6zX-HrQo@qrR_gsJ>p^uNbSH_sv^c2P*m&raH9Slkjr6sGuq+PwU6cT=O}oW@ zJS9E{?~M!Jr}&esx2Ptax+m7+-8+#bDU+6olPo4NAib2ZfF#S2I+dK)Hufj@{2wT% z@mv;7OOMT#76SFLWGoz6=S3QXQHV4{Rbo@mr$_IsdKaNGh8mOFblwt%8%RsU$LMQ>faOg_coT{>6O@Q>6wVlmX(5F zb|fK4GgH(^R?~~=*g!O-*ZK^#oK2@8eX=(XEbFG z?^*YlN2@d5ayFI5Q{c>}@c2$6$0L={LGV&>A)TuZxw4X{DfSYH^GBs+tC!v4<1?Y>C`!*#N4PY4K^0YQikUmG@^L71I@fhLwU8W@k@mIM; zJDTlV4pQL6tK-GQ^an(Xhhj*5H_e*O)R~VGs zT7@ly$7P16F9A{9*eI6#TIcr5`G`_P&(_AuY- z0hc8t-GUY47Y;~_gjk+;-GFF3_(wR0Fhd4mt|0Ja zM(qLDakgFHQf1!wP2o-@i@bb$#GRR&Ec-ggf?BVLnsK{DSdxLv&^+RIn#)6zZ`pfE zjLt*;j>Cw%Md=Os-zVkrzn=~1eTiC9@f5N?w1iZ&dn<#?Z0IeYz&dwP1J!z){VbNai?C zx&Dy-Y@jMQQ_(iN{@A0&{6|1AJ2cA~8WT|bU<{%u$y{D^k+*@W!g;4LsIhtY5 z&yZ(uoaQa)7c6l02Qp?C#Mga;PFAZ)r+X{1))VXWGCOI#ZMOkWg zMDc+97(Q&BWr8C+CbY}$BAK-IsvZ>Oq6{c`Fd3Tbv*d-SIJr%$HSE>m*LF@n>X5p~tdS<^;Q-yqt!i!w!MZ4QBt`J$?%>G5Rd)$^036bhQGvMtJtC1$ZCD|@yP+FhBuuCW)3DC< zZ(L0EP38LK3!94qjSH8nx|xVkB_>{f-!yYRPuO{_eoh^P$Bg-idiRo&107CEfmT7( z@}A>pLpys%-PV!eQZ?mSbb4G;!Q!b~YwNeK*sx*6`JfNyId`^1Iy*OYfoF%J8P0YN zCpZrdu3r3uMQfhhzrV0NnT)|d8|n5F;$+k%Tx+T={=_=n4C^%d^(`<3AMG%9jHcw z4w5^jnxQcoLb_T+>x+>kMlfGmdR;AdAuwh^+(H>dRa0{{k55ynn%cNDg|M}icm~>? 
zbt(E;4DK8)yB4^mVS(pq394p-6KQ6XS*JIh%$_kP#9Ka?na&8cH!!X@)|V+`GyhJ( zU_*kHb;sGa4c1=;4IGp-#0oO4yn6OMgbA%b8LB_flDrl&?`~zFBB8|Ks=Qj4Vap7j zX9^{6kLK0JEQa^%ppHHxLrz;fSp95vpidOE95)U-ZVO~whwUH#i_N$Wku%E=4Dc2d z&c6BVwh#MTYcl%Q>@9*Qtq=sbF^^aao2vZk&RlWfn!RPLyOQTA_e^y!!MD_{Sh|%CgZk2j zIyt#whkowd*bcc$?QrYVEvkrbYN%1&RGrr6kH>xf26OcJOGQIC2@k>Xn*g4(*v`In1R9(aalc^}s7Ls_zXehq1Dv$1fljfS*h}x3+YLOBhCX?P$<0sH^XVF{J_c4Cn z?7Eq`%k$h3YCm`^-|NSVh0M?>9{f17 z-(_z~|M<*dkIC)i3EXq?DE3<*xsu|y$h!$8m}6i$8)E`C)%X2k)~1v_DB}MbfpN~v z-JLSp8!QO*#LC1760Jo;TKq^1KLm#nafu{df-kWyW=zt>|AjE3h1Du!asWyWtmm*K z8;}8r_cG_kz5wfp{jn+niTMSocnHMtkl2$9A>?s^xfp3J7q68_dTn?JEq~97A)1ti zW~1+CGXk-n<6E$BrZT|7<3LJ?!q0q|GkZ@1DnbZ%*#{r64smf_(RZ>D zOti+r-?h4QS*y~GgQp*-6ryt#9m;^FBPn&~*5xXX5yRE>7`CSD-m~1N%9X{1_FOvF z_GNLQjgP;#V))Af<2d+xO?G-6k_zM~0@At-1Noy2SbWAw(G-Hx0HL&-Ae5`&J1H;9 zflUB$$q;LyEUS|fb%W;X>(bIgT87g5Lwot=P4R^A(v5X; z=2|QkxS7K^re`oqOb=?@q+a2b>4RkfQ)*{tyCW2iSn!>O_Kxh#=aO?8)Bs#?y+G>k z)r(@K*SMQEnhfF|lLx+^FpxsCEb7Yifrjf-dsL%1p*h)XV&ngzcQx3B@};3N zSnOe^MQc34g--fEwX{Yl)g%{9lX&re*Wfx@n;7kFZPvZbTBNvN+d!9qYTNh=&B*Qt zMVYG=C+|?!a>cQlF(^m9GiMB0KE-2PnaXSZGFOq5q+InE$1jee*KnxTpl;2lzDqWQ z{wcYj3obzY^UT=H+(;uERtXgSLlof})KzIQ4Y<3vKc)M3tnugE#7fkC@E9qVgAF)? 
zD=cyh*rD8^20SGeqvnKCGc)=*&MbiM5>H4`H6Hc-!ac`7NQVWC*MZz#C|WQqsAo%I zmUiSxWq?NC%G#V9!=mB^#hzxZ{FMVY<*{JYKW0Tta(3nTczIlfB{eq3G7fG&je+u$ zY*kKmf0lUs45 zVjr5ak8rG8iH*$meucs@1>eR7IU0NH)3Flp7#tf>Nf(HZEygWky#V4dtr1IXuBX|x z8<4#_v7Hq5Vk0=ftlfZH-*u41+tEMmBh16=@!Y*V2e1vSJh^5g4k(y70rcX3QSV66 zMjX+@K{mJ+xk@N>RL8fR!mkEww`nG1cSR0FwiSV<0>Q1SB&u}bT3$BrsjoaqY1&z2e zdeefquQ#ujXvv)=n#XOBrqk-16aOSWfiQRL`gQYC=<5Gjb#3ryLY1PA{!gX0al2r3 znKJ#-|ElaZTsQ7@g73sC;CX6KmD6X}`2-wDl$Dp}(FP@{V^4QJEm3fs#M@f!)m$*= zTg5W5A}eUir=(?~te=)oc}wh**WtUBR$PtmY@P3km&$gs{_I`uz!kmu=A?3&F%aOa zUGAe@c9OYTWH?^+y%*zqlWOc4&D+VI63$qemuvk!BgZ>g5;$>$Ba`WdB;B(<`Vord zKPYit)yF=jC%n+!cHtZ8tj_cl_y{7!iT*`;1Zb)gJu%u7`P21aHL*{PY_R0*Ku=D* zeK?!@d~s67EwL}B5B9_;_$uR*k6eu zP(i$8pXnfDaU{|G5mF;>POR|Je~yta zT$chVW*Ma4VDvPj!>qBOq;~Zt9H|fKhv*j=XN{e66+`A&_$O?AmsUF{fwgzcM+9E% zRNil?bV0^q;#=kbx3UI@kK6+6jMZ;re6d&@uYTqK%YK$Gr?r4GyM}X>9Li*d-9o7*thxAsm=C|mN(wmL7ZB?Xj`S;f8{x=RYaZI@W%6z_78B}G<=0aT=Hv&A!2%Fb}9HQvxTH&AwseHv+f#O6A>EgjuJp1IxTX zt{O!N2|)kMPITHDGtn2&nc%^=1ftfbwR zoHUxLnV72XN3c;$G|t~}(M5l0p1N^X9G7#rAO%&gs;Fq^#RWz(j(S zY)LB_3d2JkBIasUCCrB6KIMOCbB&h7Y1UUv0tU()&0c}1IcLl8)b+|PK9;!o;|<#u z4TFPxJa=r9{%R;o+cHQ~NB$s@rI%b>YLtsDlD#+smD!A*`9st;wzW;)Hns!5p=PfrFrVnbKX+V9a`rf;arUw=t2alG8QZg zq-O1Cj$h&=8owMO-&hbjz*=2a_M&Q^Nz$BO0ZAbL{1M4Z33M_tXJq=~_brwWhn6f~ zs@xRC0>vrA;AWnR!gOAY=Zsqrg&4?E`y^r3%;ip^^?~47jp|z&WA>XP57ur1LuIN0+f zaca-Hk=>ufax8BgI5?<$p~L;$zr_9A1L>AmrgE#zxvQ!oeB`R%3)J|e7+$hiPHCo6 zCfi8WNxy;PEy8+&4#h}oB0-OwsD(fy&K@VDJY$kgVhoBPW4xs5qXO4< z$>o<{(uS!~dll!+Texsu8<-3qZSxiq?{Y%>;oZ}AA8tqMi@lzhJ7eCAx!^m;XUtVr z&z+&x*MRF?r=AHd)4hz<44jBXlY>4jlN|CQ;yjuZ!wxg^=!1;xY=oll)AuQgZ*s`& zi4&}33-cuTfrpVP5m3s%BvN9*bTEmQDB+uLFe~~Q&ixuD?$PE&US+s9QCJ8nj4J?=+t5Xf05ibRb6NjK4X1T-&%jczI7;e?P|2% z(<1j|&y*=WPl9K5kF47hJ9eD#iE`uM!2!fRR6SZ>j`>mm-G+q{>ltV^D@d(Af=&js zVpnNvdj`!57U8ww$LI%7BCZu(-Y01O#L^>Rrt$@ZK&MOPi`V9~UAHHLZ%Ol8h(xhJ~ zDX(nuRe-wcD&=2S-4>UEw}D?SIk5!(AH2Tn=+Rw&MvtLnQP?wMK4jSFp~l0)N4u6s zgC&DMNdrkzziUARdn6zmtlz!{IJIru9BHx6HCf%&Zf>p3J!#7O9X-A6&w(#jZ;5TB 
zzPRQwk@CDzA<1i=pU~bm;U8MeG59UcX|9U*8_4IzqG|jq39Xkv?rR4vwrrI6ZwLd9 z_0t?CaKWYmkdQE^I{H)^uxW{g8yehBh9&L3$;F#z)^A<0ar@?tJNDkRuy11LfWEJH zVjuBGj`R5JtTb=4W%9aZdzOuiEc^QVS1R*cyV^^xY3=H2Rb@~Yt|MlEy0Qz~sG1Go zM{xp}y=xtbZI0**csnvUyMIjQa(&f5{q~|@F!23*OUo-tPk_Bs{l!aqr#r!G&M8-v z7Wqpqd%vP2P=0f?Hpy*p9!r``RyB)Dk?Zbnj8N%ROAQ5hRv8u;XSCOxW$b)nD*XFW zv|pturfGs@3ZmDSHn|k)@p6D@LRWZYaJchbWymS%@da2OOo3m#-T0s9=CH!OXzK0T zoh*N^z`8c>y9t+Z7#@9OGFYZlYt(*;Yt(vT8q7GgQ>$~;NKLOi8!w8g>Mn1?bxzdq zlu%4Yb66&qok99R^qe5(5h)9*;#mu9zD!_4(nXYP3l^KNFY_$I^=Cw~Pbd#G;=9zw zK)KLIiqir`V#Jf7F;SEZ{LMuGMUcr8z&76TV~@ z@zdi%tKw)3;;15bg`*^*bbv2uGFH-PNxYiS3_4!YD5-{#T2$yst%MPp^32#@0it<- z1{;*nY*5wX1b&6cph93yI%+C?7RE-&cBfxTs!sOp;UxzL10rQDH#4j?JTm=ObuaMx zh+a5PV~Xyl+MoCcsRn3b?*im)R8{=S<-xGsC8N02N=3nMb@gk33q{t!G!s5aSBWUTZ}nsss=vJ zZ)hcrMpaG;w|rE)?ZAsabxLEI5W!F4SZXy#+oI!&!ww+3@ErB$mie+prC zrtViFLw{8(lJoC4PmgX;oUx=mnb>{wl4$1OJORjYTL zzpBcPd&4Oo54G9K%WZ8#;LS_b7edez!<%+(9wzipAdEZ#KW^T>c^#quIg;L^ql29W zu+uPMis7(9xy;ZxsEfrCeXiC%Lww1h`O(OzV2p}Iz95AAPrQ;Lkh6kDY3Z(mKk;92RS zwJX72(Z=rDY>w8GgwGS=^5q&Id^(Be1Tuqcl8kUj`6OId+w|+G-b@_E`fNbU)Jc<) zr70CBL2SYho=}t7;PqI2N#(=3aJWvJIa)#T(5;CTC6%dB7*yTjGYo?_GX$2*GhUNv zme}He&H?!)>cv)(!8TRHP|D&yDP$!f5gn+!d-LWE8}4po+yNI|u)4eVVRT*}?wPQv zfN?^7?aKeJx@V1!syg@VGmrCr&CEGxGOs))GeZ&*LLQSB2}#J4KoSI{yhJG|h=_ot zh~iyVTeMQODuOnx0=>4VOKn#xRZ3U2dTV=2ul2UKh1=CymZYs+z24Tn_%H1`NJc|Xk0hrw-IzhT?94ePgU$?S|<(5jw}_MV=dz3A#?>((uM2ESCl zk=Y@S#T!~$8lI_dZLQDvxuSOfpFvA&ld~CT*hmeLs!beb1A|Ae6p@~Gf09*wF`eNz zHngr96)I=XuG~*M{no5KF(v~KIiOrMXJp0b?5k#dlluQ&x7~_$cm#NE=OjPrO9*k; zAVf>ar$&>Vm?T7mM@=@Tx?v zUz5eFri+;wS3_S`B#|+{K^V~kQ-{lBnJDHk2d0mc$*WiGy_9;@g;mg}Kd}^)LqE^y z1yOUTcWGGv*bfdB)TBq_L<@Z@(UvE(MdpdD+D84B{^8Wm(WF?&*{>5^kmp1(^IR1D zfx>nx31cF+1jq)~P(_B0(6UxmF3>Od8R11u4C7~J!!t+hXY%~Vw&pw&#3NP5t zG)&x1?V)bLvaOw6*QR}DC(TPb7Bpf($7YOpjG+(n{EH@^&vS}ppc8n;4~cSI5f5P!I3!|M1N2AYi=Rpm zdN@H0D?h;3Kfv=;H!(&t(8pwZ?zt?x`N=3|DdIs)V+V73Jw#x!ZxHk24+t2?FnGf2 z_}kZsC&aNdK-L*-hP9=C zKGk$?9y^KWZpfrU9uFBf?XqEp#zUrIC1AeM2(vgFb2{fTT!l~iejj;iT`%q 
zP5%e*lpxVN%mv7olJl?C6P9v;ucyt>Bk30t-4y$W+*ymGWNURfDW9mhWy=p7!0m-JQ7Xk~mZ{9>;C4)aq)RiHZbC(L(Z z1um+nR8>Q&{F)@ap~^u;*}p|rL$dsuth}jdK}CKN&u0iMp+yO1yzqq43i3onpvzc9Q~5a z!e}?DtSZ1dO`v^fk)s~wtSEn`n({P@iIF`c!w(MsCQ%lR!~Fz(xO4ve&ilwqBpQv} zR~(HN6WJ6!g+Ae^g7aVQ3N2VJHtPzb1XGW?WtNqTSt%&8-4L3UKJLP%gnd$umTp@?CfE^%%w(rFx$$etaC{+0rs0_p6r zmQAERC#O*&T@?zOLzt~KvwC_)=kFU`(9oxHzKANAvqp!82jW%q-Jb5&j_${{42)D3 ztE|GwvEl6tTbn(2Tn6B;7RF_!$7PFt@l0@AJqn4e>j5Y42`g^q-WNrd6#;e;5R`gD za0x1cffQT1X$MsbaIXTmJ+LM#vL52ZLWp@uQ@p9c3Z6)sz(>ktCk{2L9`5NInlm^$ zHafrKSN(m7Sxsz3J@WDz$BrzdUvHe#G`lkt>S^t0uA5g9?rKaH6><85fg+F}1;N8N z*=KQ^wg}Ep#w=^=19EC=5ePu6K<<`uK-%EG5rQ;t#KZgQMi(sXuPLVoJl$;_-Ft5x z9Ihx0!RZIp12z=LSc)Bn=Y7!AL$$#>fDVT%h z6Q_`ue?3|MW6ar2MYzV4dC7m5IYHcXAYS2`tzVVuTCw{>8obyxirDL^KZ{zvJrbQ3yzZDN6nQuK|`}{U;r( zmkbO`f~G}^z1`or=V9mdGV5U7MGITtckZGndhgb|w?$Nib;(1+eL=Ix7={17zM z#lY8Vy{$NY3(dnOh14

YQD~3Nm%nr^AG>P00#c`uMmXRmS4Cy>ctoxpo1;o+AFY zEmSXwnZ-1UhYtY7E~WIxRQXwsfuz1=KWO~ zkNm+_+Nlc4k3f_Jf%08)F#R*x1OAZ_89$pXE3nmxc28GU;4NFAOq~(sF#bi@lKq`j z)x;V4A29bkWsu0zY>Zb)yvf?v$R-!VqEEi3Z^qlF>Sy~NfBWs!&)=E*++BA*_w25_ zoNIv)qUT(Hx0y zGaXuZIlcq8#XELNzz^VTME?|MXm$=I*XNe(gXQ0t4xclvK(92c@unjWJ@U$fJ26@O z&8LoDcjwkkcP?M7`ba^|+aEx;?|WnDI23>(KrDsWAYi*t6Y-s7HBgf!hMtI*F?y*((a-*I@s0BztZ|ESnc#9(erM|w zCr+Tc6Gs;Pd=z~OjrhLbwQz)Yv%{xGlmE#U3?#2H5e{UZwRf=j29XIurR zGDn`2a3Mkh$)s*7;?;gh5dFJEFZ!LtveuzP#-}nkhU`iU5<5$<93i_4-r0Oqs>u-Ijw#CG$n!7^N9drQ@;CNI4&3*|Ma$JBuDrb+c z92y5i?J51Vg{U26tZqn(o3juFl8-iXR&l62!4Zjzz6E5!WRp*mOEuFk^1KvnXz>`F zThrB|ViyQStlKTSoea0-BJdZCSO|0x+G(SY6WVE}I;r6crk@-MBAS*UvI2zc!n^se z(n7RFGLjd)n@2L^f~M~R8Dl~F;8%u6=FdIx;0zEkD(gVR@Nn$tLi)IvM<-b3X7a|l z3JW1!!C>!>=DNPHErXa3gA9TgTL^)FE|AgBIIbb_SKY9qTua@Yp_R$got$U9c3J{*QN@6dYFH;GNNBBgj+JT7?iYywMs^Sk#IZE*;yhVlp1x+%m~Tpt zd2|Smxu%(|td~T=%Z&`v$2q|(iCO(XM4D=?q{o^O*o}u^tS%rPub8yYR8%khJ$k7X zGfynpW`W9!KCJ6!KYm(*!>TfF7~_g;O432JRgV}4C0OvvxbB6UenmP2?QNV$(Rb0$ zkoK&GG!iJDVJ;?piwaLk2aFQqfDDh}=h1VDV#xB-XFt|;+6lK!SsmAPXs-(3`*$0@ z3}B_C4sWS-qM(lvpbSZ}9(Ns-BvV$<8m~UC;GPZ|5&e*)_+1RA5q%$|@iYZ8|NmI^7vcy((~=cq;Xnsv3%lHgQ6!58AvR+PpX2rnM9a zpotujupYgyv%+b#NmWf%PCd#%`&@|b^py%+>QPlOR2AKPO5`{J^@RwWg8=6|e9rVH zO%RD}15lLU6WAYsyGNDLCJ$~WbyR1?_fn543f(BnsmD&~oR~Tq)}e9eQ6Vr60*s_R zLMsePdqbqnOJPLMh2g4d>e2T^R!<$pIPl>1V63S}VXS%>^@^N+3d^C@#4mCj+AGiX zi_DaT!NFoh|L{EtcFHf|unah6I~)u(dTe?-pzsUMtBH<0wUo0?dC(ApHN2 zB1k8hzO({&kGRIklqzt)ned+Vwsn zjjkfw`4&Nml5kMVJ~uG|cwaJ&e*4e7W|R^Aw#^FZmlQ_j&)!V*+X(paQR*2qW2&4Q zw}lKNM9Ms(8Cl~7@tt~{K0<$B`_`o`&pD1~mif8Kvb56&WyblxOd#QB$3BBc@pO^L zbsb2+Zk7{XaB3#~fh2wWOT&kbfb6bd*X{S>H_^$)IDNHDSe6<`CaIH?xJ}_?Vi6ZEt+K9dW$)5eT@4G2=IgtbFY8+=pdwFPkI$@*DnXBr zQ;=C5(-WE`(<gPSNvo9Xuoy^q}m8C)J(Nm8zL)~wis-@B^mlC{?nzu08^*&o}NjUCons-?b3 zJJD(2SES}tebNAQCJmxTVBGO2egSr9;+xpdzAp1k8RN!>$ezaT*sZ~Mu^{t7ueY(I z;+EhH@&QoP9fuDk@FAecK-p+uPD z171&!!@BaNeY<;UgYL&rW-`!~+Bu#w6w#EuN)ng)%xMxOenD2pM3Yl4WgJZ#F@fxB 
zc^kid+~gF?SZ9(Lv&WD^4B0oc+(vS-nJj{vSZ;&85O9st9(01u_h4MQD}r!G zi5OY2v816p3Rl(%UP=88!KqqQQ%~wTx_o;N>Kn13Y2skb`>7^;w7^^{zg{yi8aI#Z;frYsq{TEiIJ=5Qc61G?7X%aTE06R@C*szs;kC)42p=q8s-)}yX z<$+uyndU#^)1!r2AHMg8_91hew}AdoIR?vS(Tw3cU0HY&BDz;V0WrT)JgKsn~!`C@nLO zn&zKu6AC$d3#`-S#z${dZQD?k;zAn=K0Gr5N*htI4do1405H@Qo(-?44Fw>Y&WMty z+aNxGXa(T+Vr(cZJCzOP%Q-d_PM&N-IfHtkpDSlW*>fHn${Dm2@O%YqD5uV8LxGoC z7}vRMC_yQ0L&+Fg(H>Tu#)fhRtpd2Opq&*(&u>Jr?5y_My%Khm!_(SPu&+Q1T10g3 z7w0Q5zmP|ims>mnby25@e(8KX!Y^4oqEP2!$Qd4iy3jm;t@b>6z6_Ue=2}`C+p(q9 z*3z}i5~NaFNk?18)+sb?m{)iKx_m_&fh7QVT43e#@m{>zuzV`*UjSGI$EjHR7ftYP zRa5=+ruCRtga?vsYXhvsDjNM6duuSF!A&df7I3o?@O|P+OLRLI=#TM|2x33cx8(X`M5K&AMhjX6a9>G(jI~qx zV|X5z?2los-%Iz$&;)(9#V64F1^yUvY~)i5Tbn#so}oXaQjWcrKV@!}_HXA_VOR3} z#Z}8!VdBs9N2r3-jj(c1DcLNFj!$k#)@$xe;T4kWg+7BuNM7#?%t(|FJ*52cbuP%{wtzMd`X^naits?Q)Meogg7g+nicV3<$L@PJ!aQ> zIbXgHvXGDVl{5=+2CB`#g7?@Hvv#CbYD zpnm6gmEv$q9{~P<6#8);quWTB^)q9HVyqXSx-Lc~$^J->mX<2zS z5!Fc#P(O4aO0+$S(=Nn!u)eU@QB0G}m-L?ujby&0F`puJW4u<5#mRavGwq{aidF}h zld`Cs{ktN{Cz(JSFSE3ZW)=Psw3%qWqOzs=lB0B=elpZP!|9yS5lXECxNtuHA3{I% zc4jSHTDCslh)fK#S-WfwiPxa&W5{=GLGL}d`444LT72!Ule<=2Et5~8d{5sHy8ZDZ zM;>4GSdFEmQbLM*#{RV%_wC!b`b(8=_$ZPuYW4z}{tpeg>DvGRc-pO#%Wl&^6o!At z?mg;d(*>YtBsQ%?9VaE~B}k}=6sc7!g7GfBpYsbg2R8&O_>hhfDN@C+Irr{OLI_Fek zIdhz4T2;;#N_cCyfJJ;XY~wM$8FrZOf#E5`Oh8OV2eq*>u{%m*|MW<$X1nuu|UYa|HJ$AH=e*3iW|-&J_1@F~!thd_(I z5j}iJTJjxTS`Rsza7lXzImEaar^X&>v>+i)i`d}$ALC}2KrZF&vi{w z`iyr>3Tkt|&^&1!&@*GrbN|uJ4i{T`b;$S^JTulenoY619scwS2xqf&G zTJ!dh{AVnLq)q805~d;O<=@$IIE#W)m1ZIjC4y8)ktMRHhBCPjvHyEV;jhQxS8rgS zMV9(%+EkWnjA_CCNJgTqvLxo@h_;@-$VlfB>C6-}%)4$}DZxB*N;N4RHqXsBVyNpV z?x`daGU^Exsy=HdQW>R!T_5{!MI9oDq8<&1A&vwZF$$xx48~wtEQjT>0#?LISQ)EeRjh{9 zu?E(}SgeJ$u@2V7dRQMDU_)$#jj;(f#b($Xp?U;xTOhO8iF$Gi6iD{UQF6@9A=*EuN2|bvJS?I;i=)*3UjXBsAyJ2_ifw|Zd zHZH~?I28BbG<=4`a2$@txwsOS;3&L-gK;u`z>hc^g&cjvs89(6{T!d@z z1Rle+n1?h@Kn732!Q*%m&*5o2gJuIK5|%q zLFD1$0u-Q&eKlZ}`I1pdq8{A2QHLPVFBaGrN z{LOkcFvd8R5=^j>qezbCG91HYxg3}03S5yZab>Q;Rq-o+<7!-;Yj91Du_DJ 
z$Mv}ZH{?d#n492tyvj|v88_!RZowut)1<{DTR5JroWM4GfDgGPx8l~^hTC#GZqIg3 zWCtfP#mSt)skk0Du#?j`on734GuX`?xf6Tv2mZuI_?R;}i@n^LecXk!IfuJ)H}1|o zIG20U<~*jEp~HUM#ViMy<9vLAPq_e>G0#D|EO3Z>(PNP%mbnnG;eEV`x9~RJ!|QmL z!#JCZxR^`0H}~Pb+>iV703OJLcrXv)p*)O-^9UZvqj)rr;jui9$MXc9$dh<7PvNON zji>Vrp2@R#HqYU?Jdfw|0$#|Acrh>GrM!%n^9o+ct9Uj4!)tggujBQ+fj9Cd-ppHg zD{tfNyn}c0F5b<1crWkc{d@p7;wIdR+wd@M#w~aR2jM{+j;rw~?!|q$n-B6KKFmk> zC?DhFe1cE%DL&0-_$;5}^L&B-<%@iYFY^_?ieqp-U*qe1gKzRJzRh>|F5lz({D2?w zBYw70>OK}p;!R0s}r{GlFf&1|QKj#yq_iEh%xIb9 z_qR*YlmHz8rZmt|5=&JZITagCNeoA8B&9n#O5#&Op|YV+Q>wkP6`PonE({LZQe;C# zd}^rAK-Jeyoo9Qs1O6e>S<2-zP9&!!+9@l^$x1r)N^*MZIwf;XtI;{l$jy&W3o+(b zePVhz)q;N&S{;Q$5}17y?4Ta&}#Bi_1sv z5*FckSEtV$1Gk&kZ{GmGYMzyr2*jaeZ9IGzJ2j`)zphQQPP%?#INPxXLQWCl_IcUWcXV+8B#fp7mY#}$OCD=39In&XD55+>$CqC$iNSgVX-?nzmDk9T24N_J& znE4yFKu5V|u0Vd%U;Am)*;y^zJW=rrWffFeSdQpv&y%%9n3C&~>ryn)o{>rzicv~S zc8J?pKsrD5r?P9s-Gl?4Fz5dbWDooU5VkRZRDSYL>DNrV`U7pD{|Cs&3Q&9czDpDK z{}|hIgeT*G*3SZD`;&Q|>)*-YK!vHB9J9I=R>3{;ru)7jIShHidKE@AKBKxFsuv{7 zdc6Z2ehR7XOnuHhf|w8J6D@k03?f=h>JGj(_V4#o9q*kXpS)Urw(V}f2F{&cIT`x2 z0=*{$`X6s!()j4bO4ShPQ-Z%xNI6!WV+Y5vXH!?9aAx0!?4EhgF2R=TfO~PMXo{zs zMKO$69rJBam=u~uRZ8;n&f^e!}5UA0-oDX|Q5X{q-R|gaa z1a;dEuRg0?7*w}V@y33_^{Yq||F`Zm@l=m7@>?lf*=+{3vx|Hd~eE)v=W{CU> z5aanHy>;A(QK10p3NvKX98KP}_=(Yo-;23l>q2HK#{Uvjo~cQ@c2LPWGzytL*Q2RGLO9EsJ1|HJvOo`-V}QV8-VYN;>kCl2U0 zZkry71}>nCBv-i>>)F8mqO)GolpX)4xDZuMdQ z>;{u{n+iuxhvFo)(Mr>91MpE2@Faa~gkK(s3M)r$wO=!Y0~SPoxq;~9Q{6;dwPm;X z$8oD{A`am3?e3ej1JOT*MaYnV_r!kIBD_9UX&2aiS@UF}DHC+fn*E-+Uz2(m z|JsZqeq3L0**oiW9WSRuJPhXHgS{5`0%baugd1;Y4)}tN8297VU<-r6>JJA+C8cD& zhZz{yT4albP=fRFQ{NRx!=Tbt#8A%IX=x}aGN>5%ex-IRjAC0U-4gA|?oeJSo5dQI zCH7Ti9fu{64;oWZ8{W>y5X#JiFDQE_l{_ z%V#qjQKWx=NPz!6{CSy4VlRe;%wJuWSBmvmz_XWh;Nzs}4}=Zp(I+$Wwa>LXiSwoe zP<%sjUg{;E)%~jD9q$A}Gti_fhZ3~5<_M))nw7;{5kcE>EdvL~^_8b;IoEysUoF;; z#%i+5cn|+TJGA>gaIT=&3J`pcWt~Fv;F2-MDQ|5DqZateZ61v6y|(F(;HM$g`dE1f z4Ex%M4iy>%9~l`54?jQ~?Z97r{5#HSK^4v$IiZ|(i6%|IbXuwhp*TyI(N1AG3l%}YRz4{ 
zFP2<+xhqUNy^cO7N-AsvGr2$%LzZWLCk9C`AwU5F1ce2*^cEm{~=@ z2zezC^$EqmJ=%aOcPn-&k&-dB?JEfkgK;v7Jd{tRGE|*Ab6#e!7c%Nf0^G6h%d2f6 z3?-m#zoL}c+rp!iKHuR2fk}dY}*k67HdD$Gq>dmG04Vv>}bO5n$SU#L7&$ z%2@h}EW3*MlCJ}lj04m;155)_p^;ds`Z&o<-J+DR~BTbsCohqpc%p zSEyc(E@++!9wJw4zIfr~ZPpM4Hn!c?@ZfuV$>}fCj5%ohR@@_Wc``!EzFGiJ-Xj32 z4|rgIAcc`Gz9U^@c;gzl%cILaP+fz$rn4TKu=35dZ@Ddgn({0(Qc%T2T#~J#p!9W3 zZ++{$K4QW18*Ni0UC4rGDSkPF=A!$A$6x9})c9Ns0qkoHy)Ro#QHB_=(&y!;@OIKemoY*+ zjem)wCL?PTVty+{#tbXqQ!3B5G^SIJIm{{^MNzbK&r?SiOkyd)EQF6$LR(qZ)D(K_ z#{1wVFf3j&Owi)4mCjl;7G0Q5ba}Gp7g?Z7LCeK5N>&?0r%){r9}-zWQjwyGKgR{J z#q(Dg*|ML0Rhp>JEW^jDM*=_NxW=$rF<=Im9Dpi)C6XG+!Yk}1=+YOJR1uT*> zWpUV~aH1`AnLgmp%f~U;khV#ZTlP#Belbl)@>yUcjN-E?A4NVlD*P)TK1q{SrR+hTV5>u3 zDf5aTcOMlv5KrAge$w4_Ks@GU3>_DTIuxkz)25;!Vu$cA791O?l;bZ{l0(V`C?jCe zT|wU~1@RjJ@HEyTvYer)vt6B$DzcDzbRlZBmDu`@Ks7EkjJjYsQTWcTSrY>VtWT*&20A3V(1_zTZ@GmsE`rhuc7E6!)>P|Iai#w`ne)+GsP_aTr&tP?!<@QLKA{BTCnG2f)4x9 z-k0F*r1|&FE!Y_KdNdX+%ce02%MO_i{kUnf4>=**K_cW!hMvfOpmR5iy&WOJ@ARci z@XtZUae1UxvBG`hHy@-D3f$YGAhSxzk09c;!U*-u!oy>IqDjBzz3yLYVrGmL*+HFL zv8G%lqoJ<63)oXhiCsEJ>U@7Qqdw+mt?}hIab|w!teFem;I(A+N2f%8@Qm@7h3;+R zjA-)s;4IJ{y4xD1Bgo(sdx=CPlo1CTsgKxCj|AubOg&~zwJs|!f(4HYG z(c5N6HjXlS_;6wq>cqVg^kpl~xMdgWwvIRli6OHdqaXvdi>0P%L}#1Ka4qb34rb!D z*7PK zOGizpx+!_wTM#-sN!&e1V#x_Q0r2kkqQ|F~GmGyP5EzA=w&*S_GV68=kKzoQ%p3)k zU>$4aaA02WnT0OmrY;k4D9#JT(X57nat6#y@#4(B&&)nm&7_q?(t~VE4nBfVx%)y3nAD^k@`0|8p?^aSurk@i zidKAcy4D2c72|#M%$pn606NW?f-L9gJK-ph;X4-mdsL%&;tK{W0@9ceXD4a5;5f${7mNu-@~a({Nu~>abTl$+;+Q zMeqGm)6}(R^y9n;OImVZ8>h)df4MEpaDIGCP5bTnmeo^W6W$C1D9Pz`*`(;S{B9<% zb>8NIqw-2yVTfR-9Xw$Cv(xC>GjRj9J*~hw{qp7<(%;^OkJJ$|VRx~h&1(?l2^9%Z zFe0RPH1kz1F#CG)r?b*WcwYbJ?#{?&P-^VTU+p!_QhyaYDO+R1+hGa28RB;8KNM;& zMyvZAi8X$?XEH1CidJWNkgKC&zLqDPXJvPml}?*jgOp43j17qvxOL%>^P*93oukN; zR{d$1tI(6_q3{lP*Ifi$<)+jVL0dU7zfSC1V1cXE2RQY+-Ke0=84rYZ+$pxal-*{? 
zR9eyR1r6;SsJd}K@C&B;e3(g{MOUSdGJcVP$n(XtJlo)PpQ)`oWH>a%gk{Kjqq|Xk zc#4!6^fA#&_>z0j9~4WpFm5C!rSasjC|bL=`P zi!^(u8ilD*32FbR+|!}F16!Lo>ebaN&?<28gGo^^ijlEJm1d&ncC-06A&C9iX~E1N#4I&smjTB>ds51tR%2dl&mr3I#U zpUl#+|JFOU9(tOQ=lbz?{#-m`W`>dln%|B60bC}TfDNl)4Vjxj4|!V*# z6x=XrTxn9ljI4?cGA$=QXKyTZP%CEt;`f3Xi-~8tpnCzG!me5NZx`i78Mc`>94cPS zV7q32?BsSx7jANA@vYcyl$=k9-@=j~@XzK47eOu1_agnz1f5W+#L#jHkfk!ga};7H zNdW_e(0)F3u3`%ilH#NIh?A za!J`|s4<&4OtpNZVBM%W>#%Vakk)ipL{1Uq-4MW&V-(U1{cByYcM1K18JVDDkm{Fd zZXWL;ufB5Crw@I<{E^!+Xghw$Nk|)16MHyGIK5UF98Z%%%8E2U15p%nqS<`wLCfO~ zksV~1990_;tnL#;Jo7%>!<5*ssK;p!Sa53yfqwg8j#Yo*o6bv3Lm2jBICbH?`!$pcaPUX z(Ap23{cA9*DbjJ0$R@5GJCVTzBoDbb?r9`-_`@XtIq_(mUsMRT%Lt<4J_vxQw`J|0 zy4V5DL{pRwCQVk|hx7~lT3cQeaYFlv3HSvqn*e$0=RGq`$|9K%T2p=la z`aj3{uy?(YxCb0NNpMDeti+DruB*Rj9Ij$<1Vjf+lLBT0M-7^pLu=^di+79tit)?d z7-PltTPmchp>|ZlK6>~M=VxCSa=~dcRZj6@d}WI#8q>lpN2Zl6k!EF2gfkMe*2FeN zHg{O+J<^ZaT>Q4(HCEm9DBg4;Oqkx9koEjKnlRm9$V#>0GlrcsSXyIjVV%2^^oaIDMFcd#!Re~AnGd?5`2JhYVTKPV)u- zjOHd1prKK99u3hQ*PV3$!--_Fvhu^7E)EWvB1l5O=G%(&Ppi6+3nByp9{jvSaPIJ*2c@#EowE&z$Nu?Ws4UjhLJA2p=64{g%l+;Z8yah_ z;|}z@4F&yUA%A1gk3UmW0sNxd&t6{fHy=|7a-}Km(Msz0rDv~#9g)n_q@9eSga(pB zzeh{2(=?iOAUhY;h!#TM^Pd%j8Z$UQ6HNnI5z zA{A7>o9>R4(|G0hGPSQ}%Px(H|6)MD9d|ehgXxkbd4?_^5wqA@b3q%$e@5^tSPJhl zbV^@8z-3n?PRW<3Z^cqdQh#N=SVQEg^hN#G{ zp9CC|2Eg{NleaIv@Da79nT#%YXjTO1L3|=Vfg7lYe^)a_Knpi~S)hw`ok+pXLcTWi z0JR7BCnp^GeKL|0rh6UNuKk!65Ka$o*Pk`(?s?%0Ib4+4ZqzDv=1vf5TikiR7D6>n zw5Xz{xYpK3yb4tab2t>uD2F_H6g$}evO@yd%Wl#07CN3;?UweYQm-D;Who~d(l@R1dVTf#| z^OpbE;&VE@9rZpJg%Nx3!2V;2j z3ax!a=flMA`4`c)yaWl@ZGVR-=I+)f_}SPh%X~)=#j7DzPW)H&A^=%^`jxa2d&?r$ zhCdm4_Rt(F{^=C;H2m0({9ZDY>lZWnDETjD%*SY7I;e)FcV1yq;sT|}4&oBtLBqh3 zhsd5XH7_uwA?p@-S;>3H_T=EA+=78Zc-yr{lQSX za>*JpB=SGnqw&WsOdVgzfZ@p=W<+vExb?*ciZ|5p&M|GHKSx}l4@hGN2EAQr(D!C> z7l^&oM23o=NCY+$yg#WAOG&xMg1?Z2!n|U*9Y{@l8He%_39SIbTlk>>Ek}xHc3xGx zVwu+>TD(0(2^wo&f!Y#Xnzy=>u~3$ZLH0sGZ3YpH%teDpmi2`Q)9q_{Y?eM5SxqfL z*;^y({@3?wf^OX!xenmh8? 
z9xKv9)iQw;yx&1uNAiEwP7^~y*dHa zi)wxu)xk>u4vSazBR8gl@w1FIdRbtbRA=4TzvM_Zbvtm{w9tr3f6!@E7E^Wx*3Xp> z0F2jj=8hZCX)wS{6dwxWrYe1`=nyQQF-I??;?p*%W@L-+pGGF1-QI5Kqcd;`^uTsJ zoPuA9jd3&t)rp?05|2B}7y>3wyCz7UwZ|yvk4(?h5m)7hpIN~??ex};uR>Pp^skv6 z#~tDyuWwKFQVSKvg_B~&Sl>cXx%#zOvvs0gj&zDXOTzS+fH6Hboy_^x-y6VVg*Wuj zuHGSFH`Mkc&VGy_A5m05oc`zW;rJRK<0T}KKme#FcDN(?> zGr>L@CF^VAM;eAfEOA9m3KKP8h{b}WvlfqWX;lZJcXDCeD&zJ=0ly8hwq$)DmdGOc z1LpWLuTkxTCbwFrG*9txW0?143dc~HCWCpupF%K_XjgbUPNs5z#egprnM2i?uW_@^ zn5*~Ujmjt%ZQQBnu=DRs`lfrTn$LcG&*b2y8WNERLG9;rZo9YKTB|%_XuO^|+{0zJ zEO;izUc;wW=#N3fXR_ue#2G2uGsaDtTmq~NgR*^M#p;R{(PuYSA%qd# z{>q;L94pbK^a;p1zSt;f3EvFR@)HP6%lzuP=?~w@Nx}k4r+mo>jL*1Q8KQRlfqw{f zYn{&LxeSUS-=jkYMX5lM2~~VhqZaw4HM@DSfkKSx9~C|)D0^WOM|Fz%>&xaBt#S1a zVG zo%p7mHLVdvAzFW|(ul(HcyN1+XoMNztslewlTkmo=(&rY+K0}jq>KJQVZkgJ!_bHx z8XWv#A3QSd^V)wdQrdu4d|_u%4d1J(@T<~CFno^0uiN3bS}ukI%MRhI^E<)FcCErq zlLJ~bD{hQVaMiJ$CZ;7--=SSyU0wrKXF9AJ#(j~u(Y;%Lek2v#Hi?*N_ruYivnM`4 z2?M!A_{HIoJID1WZy!mA7T$h-*{fBpd02sQD)(!at?DH4QMH(ZJpKU>g||gn5&vP& z83WjoKd_OMWAe_1H=tf4OGMy)~0R8mXG47Y`3){MgiAH@iVys((3iDcB=ar=D6~j(2bxlA0*m*gH z1jG-2e$kT6@5imS71mza*~?3XpM{p_qC69*W1`PP|@WA*x&B};VA)6#&)8e^o&+-Ja628)_6=2{A> z91`3z7pSi^xkVe$x7I&iYh8D63aATi8IV!T&$h3yk6XfN=L}rdQsHU9%+uBG%;8V` z1yI4FBInz!9=b_ebJVy(G?rsKWfFX{%ccG4-`OW~VPqtpL%%@owXr z?klm)rm9FQOc}7|rK!of_E5eb!8{()SOW)7^EV-CJe5gJG|2y*FSv3$udv$4FVXBI z<3)A<+L9~Jc#a0g0Lud7WCsL4693?On+B1qiGt?#WmEM1E7^N=UD4SwKvLq54KIrL zoUkFf^(yP|3%T);h|KAB^A$Z>xMU@$uqz~E_KiR)quN+-4g_B8D`Rp)n|PC)c=9If z5|tyORi75&7430KyvW<}mzinA)Z2vTb9(6ruRB<&(@qhPxXYY7Woh`PX8j3|Uk3b; z907LV#R){A9J%=hn2F2Z0NB|+fRb;i5$PB+{E$7Zk04;X{X~Z8f}YbnS`o2~%&5S5BKE3fH<(zVp#);~9F5D~`w)xn+?c>aM_7T~L5NYH_% z_QvALTU6Hk|^%d(Mxi?^s($i|gVd4YfoP;=K`1tcz z!pDMk}?lx)9J>&;OOXd#cO8IP4i$<{#%qm0g^*R(4asx02>>sxl^)$JQmz($^lM z?;9rfnq^>2%%Q}x^Kb!E5i;Opq4VW515%te-p4$kP>{aqj>jaxG>ckN!F%u#8FmQp`bent!KXD?B=j`T~f}0$c2K z=#UEG7+T89^$^-_1FC4B>_jV`KFC!Y@80AkYKz{WEE@S<&S@4q{?hD|7H*$SV>(7~ zRuSQ~c)m9P(lO{1>w*}|il%y9re^cAYY;j-^_O+B?lfix%5hq6U0XJ)L>3zep6CN< 
zaD^w~+J%cByq@?|rfGtlQAHk~VCx^!Z=yk~qzs?bf7A{BYId>vd$cKCNVU8PMUtBH zeJz<_*q*8Xn4YFAB^gV=e?{qjsK}$dp60zLR_-&Rja#zGxHjyF@%W`zI~mck&eV(0 z?TeEcr}*gBUo3GNC2R8s^VpKbf(dTTKu9}x_;ns6#IX`M0U~O4=>0pXW}o)4IBu}P z0HF~0?EvIZ0Jp`&IQZVwS8^oM)4zg@pD*+|m%q8aN}{0jJb#pfBETI4M!y2$RJN^& zUr;sYZK7PNbIE^}y=*%$mFA!P;TZqTKGo$joYs2#%+*PwN*0({Kd#tF_~P9AwlTuu z^Ob4JgxMeDyL3yw_gYGwF@t-urf{_S7Iz8l2+E~Yw8za=#@dq8X^H-%=~a0x^&6T- zA0LYiMyeR zo~0kZ#z+MutgJuP1_VqgyyO4*{@De}w@Z($T_W?rdz4Eu+MlcUQ?6>UXJO0wkbdA$#V^(e*u3-^`8jvzYDyD0{3+atsx|441AVZ6`X>8*Hsf!) zD+!5n$5RxpdsI`*z-!~fMwrPR^Z2z%w2yJ9nM;u`#`(xpx*I0ZrLSZzXhfD27lVQJN)(^^c1)39k9=px9uIb&trr+#M}_$aQbfH zn)P`r>0k4~u(xf(1)VKm%NSR1(=Fo$7dLQ29k9Rh*zi+Wdp2!E`+552h3BGc?7c$3 zP_d^HH?B!7J#BI^In%;UWgxRFgoWNXP8DXn zr>{YtRcq+><6|SFGJmAEsSI!}c8VWb9AX1uwFsfae86@`@SdHW;D2BI;@E0$OIYW8 zn9T6xO#z7Cot!T~bjD z+^l2xRNFM1;a+o|ySbVbE9ZREY*{YkT41TZQ2T74c!_dB#0(L;ho9)r78}0dj7mNU z5Y3cInA^XjWGbO*j@6pe(^Jm2A@G;YpCf+BLRCX7Rf9cLWz$q)D!4~j-+yJHVjFOA zq+OPmzP(YB$aOV^dt2^hOO=7E`%{JNa{6cDgMZf)=Z{ChhIxBDvO$P+S8z)y{0Ca` z_dTbNUh?}c500TfW_3^1-g=B*r(NA}) z$F*KY-Euu^J!^fCZvJoP-@?|0c7~>g=Kg;=`#Tq3fF2PZZ65jeLHCjOGWJal-lgtjM{p~AjilH!1`pXzQev_iozKy<_ zzLmbMzP-Nly2ZNH>&!!?W5R{~x|n`6(_*^mcoX(Iy+g|bYu6^RQ$pkLI_AaZMbE|i zIrPQys?3VadZ1(Pyzo#!!n@cj@~gx{$HVhO`9;k6^m@j+_UiKLcB6Acul?r=a2~k) zQTv|w-u|8{G@j?o(xSb3Mr0K+kG>n}O39o2^jZ8O04oqK;N%wTv2m2S(?0H+Z(e0y zX5MO^w~sIkHT)yKG#)#iE1rjtfKZ?4O2CuXQ{aIQl{cO@z`caejW8^pJp%VKdhx_l zntwkhH)k`aC&xb*I!7XBS-{zIYme|M=Rm=8>ni6m=g@EAx&8J#NF0O#Vh72A^gsX* zHHaG&F{PaAn`111#;=}JIfZQfPX$7Ex5gu^_eIg|3r+(x4g+Hh@nIkF<1X3|qikZ&6%}F)nmL~EGkgi6kN_+WiQ`EJ ziRZ|#oxoqqre0w?w)sbf_HTA$+bYY2-fQP)#sqa&$o*$3%YTAzU&QMq#zuV%<~gA! 
zZrf3*%yHd`R^^r{#3zcs9^OT|z|=aYexH`2MJw%!6|{4DeqLZZ{il#(+u~LX%*C0J zM>#51tz#|`l+69DEPnia0?SM0{^4WTJGjilIZN}`XLbYp)#Dg4WZG^)WH-)dHr6d< zl6@`ADudwM5IWoK5; z=LQI?E9AQ4-+7PWo=3Su?U6S>@n1v5%b#hp@lTAO3vrKSM>htkkdZsw8+R&S_nrt# zEy^NQz@F$~%xqAu&23Ai$`+*+WgkmB$6T7hJF4{ND|HQ`m{DG!7Z1|D xMCX%!ylgZaeZu%cR73w~_Q%}B#KZz3xLSUJPxLMf>IjJyA3~rWW(ES{zW{eUz8nAm literal 0 HcmV?d00001 diff --git a/assets/fonts/Roboto-Light-333da16a3f3cc391d0876c6d773efc6f.ttf b/assets/fonts/Roboto-Light-333da16a3f3cc391d0876c6d773efc6f.ttf new file mode 100644 index 0000000000000000000000000000000000000000..e7307e72c5e7bced5d36c776d0986bf71b605f15 GIT binary patch literal 167000 zcmbrn2Y3`!)HgnNW_C9{yXn+a5=tPzk_3>Zf*>s*y>|$ZPy+-LdWS&hz4z{B2nYg# zg(5Y8CX|J^$~*%{=0zwdkg&y(!#Y?(Rt+;h+Qopwhcf*=IpK^CH# zH*NM-;Tw+Wf*7`15XAn?+og55nAtc*5F?y|(Cbq34xQe3ciOB8f;j%XAcQVX>yT7? zRmFlOf+%;zS^5sh>^JJ{@9i@M(Yr_x%+m*q&x%SqE^idX;MVxO>)=sCGN-(V*(ZqB zj)Fi=4Cyy^l;9-<<9d1c+;2$6#K9*@X36+0Q6LXGrw`1`Zr=BuQ8@omK`=$659&A2 z+3osJfjs^c-%m-$8$Nq{R^jtHIF3ut%*yU`{bXZ&4thiCW{e!rFZB7!EP?#6T@ZXK zX7fNNkE`k_j z1x@t@6Fw89$|i>(31-1t@Dl=sk&2(yW(%t4@UuFEBKBzJkKX*zzesQh&AK|oq;dsf z#YWxufq(-cpiwtP76trLF{w`$^~sArIehLae(D?k>Kmff5pcI=t>|c5v}7a1Mx@A4Gnv#D=BTm1+ z&_FQ<1TxCajPh5|=?6OfKxcv3 zMKx;$OR>TBaQ5(LNIR+DuOaQ_ex@vXj-1J34>(6B{Ja2kmlGm|B`T?$I*p}PkZRQq zII}@FXLeb9QkF5hf}e=N<~k}QMHC2v+@Z2r6=T zSsfw#%V9+h!Rmj-th<^lX~}ZAyr>1O0_m zNgI$xUP~L8_QT&DO6cP@1Mia8I?yv@EgekO_9DmnlFgL=_odBy;l01fF@XqUOKM6k z^FpDTkfTV6)iD&o;giIN=#3$IYlAniB?_NJg}6BpgM%2x2{Fc-O}n?XAeeagAX*E` z=niA7j>sZMjJ2SO=S_^XPJBXqd~%(X)MR^TSXgLmo%q-ob7)AIjDrxfB{Vh}AKOF1 z?6p&*S&NU{KXQKmV8=%jwk=$=X({QmDedy1L7&~tKC*4mAu?p-!p6-%S-0(Cz=aFe zpU-VLwjwus&VDxaG`0%9v_h;4@Kn4lN_n) z=nYFN4r7dCQ;*f+Qv?PTSY^Wy!*z#oR!0TQXObh%T2NK@eg$hmEzcpulgl}V7aJ3w z9L~pD)1$ zdbJYIwUSp)-?wjW^QrrG&$sl-p4j*8UK6tiI&VzhyJvoLJj^xq&Ysx+?Y`s25BRyJ zJfSEPLPep9kPL~ao|4L@w7S(1Uc?@w-0Rs4ewa1U2*yFMwk{%Y5W%Fu z-yDpeDxvCAMgCL}>6vsTPW>d2KS5a(*gYRYBDgr%nIf=QNDAiO5+0ji=DdQK1lLYU zB?%TLB2q|rYD$XAII_G;+L1L2_V3(G7WWuA_1xmgX;;SCk{0pBC1d 
zGo#5n8QuG5No9xbXun3;lDl|8F0HZbgMaq@ytHlCo64}0<6W<}X-;kh#%^9YW0Rct z?#_WVMzm}@bTX8+FhPDpxDTykg4{S{x6a|JCMel{-`3qU*ff@2C6!m=yLq&#m}%}S z1PNBfD>#I&E&L=9dM>z5P*7@kfGCEB1O?8Qele{=|6P6i?&(`$@)x7ThIi>uda!Mqw@5sh^TSm#jU+W|(}o_TFhoSC zAWo86LMsLcQL0XqF{v`2?xHddAt}P)jc4zq2wbaDstFpqQn)GDB1s8kC@qX4t>ttt z(lnYL`nJ;-mpVy5XAH_AW9Wh@gT~UY8k1V|T0?e~R>D%bmV89;h4#hCn3aR-Fnff+ zA#oiT8f}ZUK?B5@n+JU=*!L?vEnZ*|{3fCnQifAy~j`hgVX8?6EB1 zbplr_Av2oykjBv`b>5w^ZW4e55o(F;M5}lha*tn$Nd?4nZHx?;6s}7+$`uQ^Vu7d^i2AFgvZc$6*OKb(YSd^~y+v%TT5mCnMTCkaGsK77N(SQ$ zMm1BjR(LG2FUS=8W=t}3`<7kutMoV4qCfc4Lh&hIfvrRztHb7AgFd*tkG2LS97t>p z`dABWo?*Ze7DRXsF)FJV8zV|KYmmKmP_U&Aj?5NY2s=uZ{=R?r){oMIJGUOl+1qw( z-7fCgvUvv)`_N1DBT|o4BPL=e4d_++rTGOBUOc1s|E2#B@n1GJ1t$53=`iq1eU&>V zP-N1`^FR~9#D*a!6-XYk6j5UD0))icOv#n(;*o5cMV^G1qLz(1igOQzl`aELlo8q~ z-et>iS!d|ShD+F9zJz|fKm>|xV_?W(vle*kt{BGOWw0liXvzatCnbez9J7T)%VnIS z$e+1=w=P$60M%5ytN?C^C2b~R?@8y^<(0u6t+e)q)cVTl4ZF=*0$fcJnvBUm{ zoA>O}^xG4%KVH4=!0{v5Qkl1>bjaJdW6tQ~gKwP}T96ojZ0hj`>Bn;yytir9(zOhU zy@~m01-bTt#%QU|z5zRM^}&bjEvv&W5IyuVU`rTAQoPK*Y*haITWlDiv3%)6>Y{BH zDgFEJvt1p;tutIB#Inic<{-M5jA|-5ot2~}J(&#dhV=>q9TkM3O4*8$IG0Kc0o#ct zC;m9_XDfwR4KipU{){$Dl4A2`%ysx%3qo{MTC4?SJ%>sK{&gsq;Ap@xh)s}c$P?^g z45X;$$nsTl@s=y!K5oOAhYzlP_^WNKZFm0qP0HF%liv~_if2dCkai`)%^&IKr&g>v z`~Kz&)fzE=PRBUBOvi;VAyycqNO6@IIl#2x?v>A1lQy0}j3+=pAFdZLCFVn{zdeFbWSoB~DNqUk^=8yxjYweu$!5=>P zX3zd2%uyT2iVx<@V+Dr6Y9$IYl(_0O7~g=#;4|T|qMLgmU|5L8JuatAG^=_u`|PNS6x$W>c(4BK)@6BQ=aiH|Z{8CUU2g}LnY z_cJplucp^-xRy@InEDHu`skgH>GjheY+83B{=n?Hd-lwmvrn!!`cTzsrzao3^VNym zkE>NZH2Uxtx11~HzPFz~+_7D1Hfh(E8B^D9U@|nQEQSCPKDQs}LQVYL62t0j9Yj zEr429lMhGcewaS;-1IxosDthbBUNEitaMNIsmjMTEIRU@xV86?srP3u{&fZ<>BkH@ zo@S-XBxYB8z7~Sl1u7JU6=bXQrL+rDXuv;l(4+#vv&bA!AhPEWJY$#!|I(Mv52ZKQztnJ; zt9TfrIal~e`b=&Jnro}Xa&H908kV@p!dy>>DalAFVRVvY=}lKXsg3iHxQry_(AoXy ztXxpqvE(1A733lUa?wihR^`H*c`WLM4E9QM?F!0!5Dvp>z%DRTI(v%IG?LvqFSgW( znnU5rs|FQjhh&Ql&en^ZH6eSMI6?es_x^MJ=Z&6IEg`;o_4tHpWcm2TpS17SXj1;X zaof*)wm;+KG*CZN1plEbdzDb3b+JXj{z 
z7CSRMij_aXCbN}WU+0d)yRlmfDu^DRt{T>|?hwOm>^a161Av_36PP=a+DOD0!zD9Y z(WyZ~bc`&DK{7iCXUif^3=Rs4v)7h`2r~_-!EDWo=C4fs4~*&eZb-(5dGmt;C!ES` zlhwxVlh!3?jdw8JNw?gf2R@D1KpNiZsWgbJcPae^U46*U?`tykuM*OKZ}D;8?k$gX zt^Q_@k=t5}WLfc?!973z4I$ww_=v}g!rX#Y{P9#zp1i}PF z<>c|8CIH$iN*JYx(J_o3_}(@|cvt{Il~sdXFNK&9gaf{aBqb?=^ePDStU1LiFoK;3 zmeT5ZmjMBtUs|)4-4@N`Pj1~!aLDa>rDzpZMIR*xAO4Xue9F3izoE;?=#9q;*3(&} z-P%3x70}Bj$44hd?}{t`@sy8lizi(Bx8%>+Fhp7~D{mIo^$ggkQ&@fFg*OzbLPZdx zj>HgMDaG=B)`B1%ozTI$ND6prmLYy(LUNrTtTH(~7JL$Ia44DR$*0QDUw+>D?_4ss zWamQ?7?xXut~>kahfhBGG@P9M@!3{V<q=j_y5&f-oqvhD#NDp0?0Oe)y(N&F8<3kA zp^xGl8&@hf1}m<~jS?tvD-I+5lKAKWY>cnOCN?I?DEyU}P{l#0V8WS9#Y0;$q2j6w z(IAGtZZx5f_w1w3ml9H&h~JT*vN`3h?z;5*=o|AkjcFN2CyL^M9)q`&#E*}XB+}=H z&o=L%-_U>d?<~oy9i2$B)7>0=%xChQSInfrx7AiQ*Egke>}}@D+#7T3jXBmi6x@$w z9K}Mm+2zN%&eJ(LQj=WF;9Dk#YmB%E*S`UAYK!Yj@D})q4e>thlKfP5(l#dtU&XJQ z!^?XgF9U^|iiyYP)gK!WIrn^_67>rLl4ZB4kv=twnJT@O`}niZALQ&@wctGj5S--d z^KZ^=+;nuKCg(qK?zC3C{g`9&3NhpH2Mxk;@-?N**~gT!zy(k-Yg*XCxdKH@VnyL{ zPLB9+ool1GWTklHg6nS(@mR9CvYofw^CN)*6C!~g#=m?uy;Rq%Rf&nPozxMpIuGZj zW3i~`G28JfK4!AUG#Z=#9kTP*yEi40+ECXl^nInLv`xzbzj%HI^nm>k8?^0&~ildzf@HV=gC zwNPa4sX{S1g54U{u&PVBg5|7_t3#3LmNVpwsn!^#SA*iBWW*GCqz6!(#dd7+uL~&M zQ$o)!BqC`?NS>7Q`_+q2pIo{2P@MM}E!s{pNy^G|#J*!Iz4eKeSV&cRlRjHbUGy3P zKfZ3euoT?#L1ryY@d*iK>H`?T&rLOFhsG$)kyCsn?o1>7q1VDNWjfUa;=wSe8|Fe+ zQmt)5G_mK9C*Ko`>j_;%#vM9wd_SEbws*Z}a{PSp^0(=GSFbuGGVC%DIObFpJ`X)##{hi}Zla{LyWVC3n~%jOdAPV~!n zI_;#cMFgzfwU>wsbAB(n@bvMOqF=>%Mf76YPpt_F-%hHZ*~ZwronARljO?un_F^1| zVcU-Ll@cxtP{Jb0F#d*F9VRz_Uuj~vAZj5E#md(?@U5QNiZ5R#%<_d9hRBT$XSxOc zKXId+Xz>>9mP>B`Ld@IelZ+Y8f62skCq6BpJH+0uttQ9)i+NvEajozd&pGne?!_wV z3Te&&{}Y74ig)EIrK`vg0;+)JT!CpMu>wmHDFNO*oCz4Nk5(dAq>EB0G5otoqZL## z5MC!fE&;wK=Hx%AB4)g__?Gl!FKv*%oCJTdEtB|=D2GifZTT4r3<@|VR z?-eq5;=(DdhV9>VZPfXk6^B=3kIWlBc5~+bq7i4Ou2onJ9~oeIIcCKoB&o{5fH+FE zF47lz`IdR)nh7<`<#OtuLj&Z#rsgk>nVKV*NrbK7**2WjCNxw`?&v*3(*`?YU|^|H z2{5q2#{7`D1em&zu&9(|o4k!o{bkjU^pWY^>m>aeRC>i_UyUpNMtbB-IC}BY5$P*j 
zJrZNWM~GzCQpaRix(2hNT~;CP2u!8$RF32A;z%!Va$%0^{&?5@*@cmjvhzhdoRbI$ z?west5L0(+e8brd>eR3_HKWBvT8nDox;{)^%<7Xre}cZ?RJch=ut4+M$(ib@q`^p1 zH>D=AOr{IQ(<7DQ=)v(cZ!(#8|CU^<`07^=Re^j`AApAuY{i{|y;z0qiG2&5{|C36x8)KP=C$UoQyHz`3kBPV76*s#E&$wR)~co zX$12H)!3QX>UaCQJ04__YTZfIEK`u{ocN`y3eHGM)=6U!o07Cl5@e7!5o*$CxPD$&t+ai}8yHSeAi6 zt>@&nJgYlb39niWUv-3gEMz#8b2x%a9+qmrt91E`;T+=l@h690Qel!`V?VJM&Z2jv zmEdp|ImmScBS(k;n$E381xrD!M|^;i5Iuo;<$B!iju9sCa8bA|Bs`Qy4r9$c!P`8@ z)cBLk^&b(-qucrOCT-7t^4{TVQZ)6-K3VOHLl@@Q2fowo;H-^DbFyYl?M|khJiL_( z8`}DH&g%EOIAvsyY-U~8LE}o`ULdk*gOy-4n-;917cW(qILg%6pv5&fPqg__On!WU z`|u|=Bt0VgkAEHl-JD=yehB>YAX@^D_F8N#P9#hBPTtQljhZm#-sx|%cdi<{xk~p3 z%Xf)EF8bx}odb!4-*q7?XiqCjX9Zd^WRa|@|2&v z#(M3Vvh~oR_6tW1*w&L&ei66%`RvaROj-Z%M(ov}8K+T0Al=}cE(;tJQI6-YBCL>C z0ns84KVK%9e^QF{O%fwk@!?M|v)Pc+L@M20I~+_#c>wcOc_l@v4SuURg!&;?yTA^COAp817e>S!eXE@B3Q0I{)9Mh{KjbS4%%;m$KhCGgB0(m zXvQ!^ML5CYlLggfKuL@yQ1Zj^f3<>Wzu0>B~<+q5RZX+6U)cBH? zqkp~Crrs&Zd3Klo`cIj@Y>WXFm_vb!)0IFT#Zkw=MI_3-+PP$F1LEdbJJ3eQGN-!4 zxI<*+Nrr(UA{ShQ5+)YqiA}~@hP0!kfn_!@plI74hYsyo)$}#*+OG{5K}1TWTm5J3 zRBQwNaz@YY@2t#ahPIbJ2j5vG&TJ(vp)%t;V!K+l!aycpni`qhSF*ZC!xb4rnI?%v zt&~7t9=w9*q(hlXDF2YDE`9O_tJjc3RmCdR?X{zff<@JmnS`meI*>58hqNsGToC!9XCoCPW`*FW1R|(h&`F?uE) zU>2NJ-X*JOT^Ea1?ZuqLfD&W0N~1)Z1eMmNx|okC6c`~?g9R0FP>~Igcr$yinE4Y+s_534qb!3#;Sj_q{=i~05JofVO>|O8 zyyXUkBZ$zf{>S4;PfIq*SujoU$tKfgkKUS)E!TJS?|*$zCY|Z3A%3P7j$-FbLHu zz%Dc7BiBIl~F^|d~|K*1dChl4JW#8(XhOXMnZNsi*FXF}cwah47 ze|%gF>NoT;Tf49d;xddQKxnS`1~UBi3SDSWL8XWlOcyPsnM_Ot!>m|}Oh~xaVayH} z7spo{wJv(XII++5+7+bFEt4j=%E`fKInqioBz*^}0i;BFZW)5f%)y6e#5K9h%bY~QVA$MGHK zZGS&+=(rJ#x8Wtfif^0JfQ{kmjnzY}SV&j3#fo*1 zstjTZ5dLf|f-^|`hC?jklNm&^kY0y#IU%L)gG0~ zz_ZjbvE1?HroXj}FV(A+0Dri<9E~z^kT@@X{DJ(#~ZNbpufSAL2$fHEyIn z`ST|b;Eo%yXaUNH@n9#Lf+C1Kn2_?}-{jIWgXu$mfBJYZJ(~lSA~kSc95YmERD4$I z{#pa)E|t=n;Dmyg#;B1-DW^v};7VRIn;L>s1RphOq(1#GQ6ut%#4Vyr$drYs!MwDP zOr}c~iZjUjuHRg@$pxAsCWyg|cTu#JYfQ0VD2&urM!%eF5t{w 
z&|!=&Y1h;Q(%_u_-$s`niKmtv6ridL;dEJJlWW_OQ&^b>5}cN>(9oc8ankO+Km9~@?gJX^wG1j`*!{G)85_mM0*l<&(*YP@lDd8b(;N$55A?v>Ga>PkKeD&uD*oql7Ezf z!K?a8ko)R^;8hW-0Ue=5j!;8QE-MjyNu&8Ob;dAs4Fn6!p%yh*!xFt@g47-seGsKt z&K~cvUYD0=AN%~q2ifb+o3$Xyf-ws|wLbVEoK*ZJ?i&&te)o>``ufsX3K4oCTKg6} zqiA8YQr=BBLotRL#?l~Nq>0xsMjAt`#!^ZL+M~RR5S(!M7qNQ4z#=7F{TZqk4QP`D zhgn!`*PfD^k|4oqxr@2nW*K#_WVyHahWLxKTz`An5YhX@QSYyh9l4RSa@wTj-lVHI zi#|HoJw4sOcguH3SUQP0|J#j%Im-*y(QncjQp+s)!PG~7ig;XA;dMo-mWWwlhEvPi zY1YGGEn*d0fLpo@9gAXBHrg|$uPg~zCUsI1Qo~cC!c)VsrqMQHA-sBl7w@3*(ku^e zdw%r&9qT%6%AK{l)2d-E0@==$E?dtdI^-bKur|71blmKZ@O&VE_J0RNYk#Dx{1J>qOhq{lUXnx ztvi8R3FLGY6OpQ6xdA9Jw7w}jJDdI?Nluq^UVM&U52iM`59BmOXeP{5D!tX5&1EI4 zqY*rK*4hD)tpuO35<6AnO1w?1u^|wqND+3@nr)^V4|W?jtaDEy-n{p3_USD=%l=iEuY-Ky!8EfW7B%Ku5KT) z=&^rRw+?+<*DdcqwsZgSb-~Sfra18dI@7%2)mG&FEGQ5u?UeN6{ekEk4B~x6TCZd! z-ZI8?p<4%%BdRVmW$_})8a1Na&(OTX(fjrs^A#afMhMijg9+4_=9N*w4L^3I#8#7vAvWv5tZOoe;wT2=yE) z5o&0p4yrCaJ#mgiqW&8cr=R56ptvXGg&EwRanNc7N@va6kJ-=ss~`$tLDoda$Sn>smTSf=yiZ6tp8E)uhAC;fWYKKj+>PRB_rGL^JEUPuq0Jwp#4 z5&Nv(`uXAR2XAg!eQ)c+9<#R%8~?6@UMf_G{g6Ux6&#^O2QQHB=g!fc7tYY#*RGPT zY~^ea4~hSPm*wFtO;-ZBx5P}UR&O2#wuLdlm*c}2m(T$Qn+u2{hH(_=EAzJj1j4KZ zfga1E1o<-9-o)Q8sC-ajP=lb>K|O=ay;#bug6CVT6g%2;2$rc;sVoX>Osj+a{IXic0S*j$DjhyzQ_)#@`#<+na-fh^ack>qJ!87t_ z(=$E0v~Q5Od+B6Wft@2Pmzqc$gm|(v@HfFimXf9=ERvZ^rc1jeFVqa7qYK02J-v)x zLva>R5ld|C&UUqy63KK@9u?2BFc=c~2e{}DKGqgq6E-UK8#H9>s?VxW*<%_0810r! 
zZXGnq)O2fs%_BU_V3hG3Dizgww~9$hl=>sPco zsuww;tOc)mq!*gdBX3c0Vk)FD$|H;>=ty2d3KbL*79JW4n-m|Lz;F_@O6*`&@LC-| zLJy|`Fh`O`t?q7wLcM;ZCu!*u9x@VfL7g49Bx0f%dmsuZB6{n z(cM=ee&-I+O9cfaS#gk>g$4BLQ@TrPo-=7yld_qsn}%ECmUe5|qhqs9Z=+6zK&vzs zpP4pe^_NkI%u3#Q)0?hm8N!2^5udqMh{H{d`$4wzrO(Cjm=$j!L};W)pyC6aYS!UZ{4DS?0C1xJF!OItKgR zMe)+v_W-Qy08IvYj4=pt=`hcz9q2!k1Rg*S0G#%Gp!fzb_e2TdA9B8<$5rhHK^Gb; zKMoY5#dWSTq{*=MXW_H4ahq;$XDrGcJm}rjYoM^XxJX<8 z3gd+dN)*qYs0hSBVHuEH1{5l!3?nHQN#qD+Bq5_6AkLyA&WCW45#p$%dpD#K$1N2q z@xr7sfMcEyF#@~3<&0TY*8IW>x>(r8ZGO>i=G(b_pU&-D_Z_-n!qla$-`cuy`m}As zhIZ`I>&n}52eCziMlD;`@0*)FxNh%=s>=rFP8rgued{+CG5O6!Hk;*1BxVy?{+}t) z!a!u|5N$&GK|)We#CuZh$i2U|x8gAG>x1YYoX4~k{oIY}1bBxNSgo4K|^cBZBO&7)l=$n{+_3y+P> zlEmV(YY$FB@#R;H(heA2G?1h(Y7Hb^-6xN`86=IztObUo5cRi#$!a3qCho~t`OSFZ z^WdK7m34E$&5{@GaNccV4rwDrp{gWSk-QnPs(f(wDgvX?`BqH1K>`_)*p|pzSQEv| zu{CSHm6XIN>`&W}d}J2_gq{c;c;b>;Jb`09Eg^Bl586CYka`tHnQNM%X$dcoVbu#L zn6j8Xpa!z$?>R&!QI(wpRb{3=_uH{?uBF@PF5N0L30>JUqwVXd^%K3OfS$?X8d79> z4^gRC^^<9f5;ZKm$A|Tu8l*PXW8@ztx;K2>ieiU zY@~6t-doN3;3FRA#)bekj}d03C&F{c8Om0Kdo_4k{ozH=DOkF!z_E0hk}*1cz}T$w zW95uBr%r9`z2?J{n@nS7&&=*KcGlc+%<7I6wn8;IV0D)Tq3?+-BM3q8_XX1g(?KDD zoGtlFT#e`DBuwxk7BNcv2s33>rxJLU;G_*`KzYh4=l}?j7%5BiO8w+{y zR%8@Kb! 
z`2!;_%xRw4jHGFp8Jnem>5*8QfrwBpRDyUvKicFRX16WWd?mvQ;0Ia|P_d~s>j?Ac z5D{Anx=4%SqkhQ;h3!5lY}ciASxJH3q6P79V*N);ti1+yjWweNN2A%s66w5ZbM60e zsWe*Z>uMP;wsE~*R{ZeA0ntB}%%UiY9_;#duxl$>A~nP+>Lm=6&PrFHcj~g8R8Vuy z#Tv!+vczNiKHlsN;0C5Un3rxUQY<`t$y_#HIHIY9;4hrjwvt}(IrpTI&ZJ8k-9rYa z(Y18lAygRmZb!HB2f8ecWYFc%<&#Pd$S+La@lKyAs8o>E>LldJu!ja7ORJpzVAi1Yc}Gy;yZheq;X_u;CQiS1#q0^EEn=^_YX}z_l!pyaY-UwoS(6Nza41rph%j3OB+89313kewO0mW#pe>_W{^z5g-Jl1v zvq{qpr{7;?a?ql|Cv%Q{C({#nJ>0cuABor}Go8N*wNow3UeM(2lvv(XE1>D{zjChS zh?PK7D8!_oyocjTOej~t2uA7p*m=U5hY>?V0^B(&;0s*r@d3NZu+8mX-f-h zMh&V^XKMe3qi4@))AwG-UK8jQh8{YV{Azv|@yBZLd>aer6{*SVZkbLhVv_xuTjKam zCYo;I8E@@CGjE&&eAzQ=ELG%?&flz-FCeaKNQWyS{_F##ocgr}{K?TEsUXI~CAPp5 zJcmjZ^-E9?y5I)GwKY9oLVb>-F4i9kiL`>cl+LP@@nHy+}MmEqA z5~da!)rps%E;!eq!Gi^RKKS}F379f@`pyZz9GO&5xxvY-PrnmgzNhyeTadJ*`0pOa ztCtA#_Ir~V()5{y!D7yqkj#GlGxjXra7gMpbNTe4{)-ET4;wi8>Fr?^KKp)rmze{5 zHX(`c4h`uyN?QHt?#Q)=Ap0vIQIY1VsAWrJIXJabJHYCQar2%-Rjs^1i^wVgWq7Si zK#~LKQ&&aB@T;DSa6OE>3xx$ku9#o}B%xR=EIgh^3L$0)5=z*4h(#9EAYrmp?RWF2 z3zMgRp8NRRzyq&U-Tj^Y%DgKHf9(3$j&#exky{RR-!y39d#3OgG=0RTl7%_n&;5Y} zw0X05_ky>l4tsM*i@b?%J0I2S(D(YWA%_N`7F7~j2-oEr@-n>g!&>j4Sk$$Sm>w`J z9<3O)i0wx_Nn3_zeHl8sRqk##2M0#{6@q?&rEiC5udvEl&`(2cHtmI6!&z4P(e;sv z;@ntnW)^9o!a0|jJtIIa3EfJv{5Wjq*F`d-3^Onze!9&f>ms&hr7QNm% zx5}1_&3|7#;=_hg*=ofXeTN)r?0gvW?zmSQMI%oN33PxjM^(H1iKytvWIYyXA!~;Sp-G)XVpte zAb(;ZdDIUh1_{-qOByiMq?>ifB{uL#z^sbQ$g(O;$SHRCvLhmoggCF!l&2k33p81+ zCiaoW_+=(ALu%d?!C+0<%% zln}lL2J$9~HH1agkWnZMv_@LtI${m7l2{L3cOZ6v{vi`;8sVhqG%bdV&A(ux5s*iK zkEi$Ddz8Sovb;X;IAn`a{&LBZ9P}N|n}^QB9A`7BiSxAFzi%;RO^CACm$an{Y3mwu zkJ;DVc!)yQ)L7m1vbEvH(oUkXQ5D60wH5e*{mM`1ua4ZXI1Tx7!<3go3A4z+6KZk` zN1CU=J1a~yH^7s=+Eei%?w|?Vi3Q_BwgIuK?eiv1oK21|rBPzgYVw3y<@z*=+;cs3 z-DH%ZctN5Z+4NE=!LLm!s= zs02{KqB)ji_}Zi)`>56p5n64 z{d&#FltM~3#Qh(sg2v`FfD*RHAW02nH8m;)%?=&PE-cKJdcM`;EoX^%9Zw=_G!+ih zR`M_4NC+YjW*$XTNibkMu1_^Cl}5K%?!-{m!6}6n(!)L_KwwRxN-%!{Ri4Vnl;mW> z=8o9pU$!o;pXy&drE4GOC+LW6h*pUg`wD{xTc?iiEZ;A#hz7+-`8!7M2GkRZ_-Oy`QKOtM*t%f)@R5b> 
z0}~FPbDcs|pA&|&tlNeegwYTnGvGQ+zcX-Q^(X-)NR8(O!95Ftd#2wa2z# zJZV=YmQJt)$f*5 zzgwwDsiM9DPS9gpr1_IP%;u33E*>1gYW4AIB^5`z`Xb0EbEpPpdr!PGf6?7W4ZfIs z=0}%&cJ1}Cb!I)A`%V3ZcTgMoclOz>o6nr1r_ah8rymU#r|uT3Z*b0CFl+MCfA93I zaO}?s@62DEJAFPYmOS(P`PJ}G{`uz$IeTGK#ieY=Oehm^5bEmv1v|pH%$VrZucl5) zMG50G?2}>#Ix8)Yx9!@ttVs)N?bQAm^2r^Cg8TYU9$nl7GMroTt0@XH914UyNDcGhbXiTM<-i_*cj{`7>y&AOG~I7 zg#NCOAn_*sg?=bkp?^Pl^j^Y#Y%dc>+O45ax1FWF&W_Fy@(!s!;Z5R0s%7__O;^&7 za?j9@9YaadzsWz-zo1b{&d^rkUT`f+$WlUdwa7d>rg6>C$`Xk{QlzF{c=U=vuSl!D zddPuc=qi_(Z9oDuu!yD7bhrds1~qs(1;g7EQauw9Y8Sgc+mc^5HK0<1u6+v&yS=lZ zNsGYhP5P%llWrAXg`ODzB`MeHKVyIQcTgve&IQcUOw7`5a|KwI1fjPg^WIm6nd96O z#dWx*&`S-B)t{SP`0e4IP$WZ6I_<#Vv!NS7jaRUJ8F(*p7|T%GS%m%NKJPzSpZ6~6 zTuAbWw0q8%t%eU9IVG=y^zQFPtlgV6C9`(#9i;lF+s=~OyN2{J%~-YVROp5;{1Ca( zHGR9glDh$2Zk7*1n^wXYL)7eM2n)36q+XjPUY5dqVYAFJq!|VIx-l9nOxPH8UYoYo zJtI?nHvI}`ZW{9fuFvY;X2{TCX%hT^+w`vqpKagr*@dm}0a&T?V$0we%hm?XDyRuBBAc7`EP4( z_K;y)cfHh~d&7Iqy0gbEuC_0?>4H|&f6BMzb)a>aV%5>gEttl0L!GK7WL!^|g5S_! zE^(~D%R{(gR^LT4kIuYdr9i4w(5@>F4Mi+9S-w3ZFH6)_iB5{N%Imf-HaRyM%0*|H z+gBOP)_flNI;z4b`(w8Zy{6>6*64o~oOubr*htmQi|VENSF78#4_Y-c_Z(7}<1(zl zo~V&6f^B0RTj{Up*fLy;$FrGy2Jp;@1Ca!%MSZDBsH#PgOw^lhXjG#%VIehlNFfpnRz>B;`K>qe-vG3){kGCY@bcUsmFp{Q+La4=l-?4?#jRO{=MC= z!qLCpNbRt|DV^V30j32rzFB@KUNk*NbRGMCA%pJm`~i`nSw#K_0BT7(5@?CEBw3_h ziezEa3!a6>!h23mwTl-=?%tL8du#cb&-?HDm*Bkl^0(qi(-Ri8XXjM{WztJc&mRdS zk%$K3G=j|XdkkcIu}x%C?V;FZI@wNk?jQbZ=U<1LYJBWRi;3Bn!< zy7o-NxtE*5Nj=m1!0kaS_gjEnNwiv+(##5v8ZJ+a=lwj1roo=5{W$q3`P*CzyH2FGh*k6HgD9f)%2~T zT5p=hwi_{`U0Oy)TJ1MpwN~F@7aqJyPdJJvZDa9W6dfRs0bJN=mt<^EQV`8^LoSqp4qu((*_Nz)Og)=x$n3Ujp|B0vtz2pyiu3o?rgb8@-~$KZ(4*VETE+X zinu-m_f=_HW~$*~1Fj&{#mQxqT9CuFs5xA|6MU>VaWVa6V#>UqGEF7ei6n_$!<9DB zd~&5^59TyPcvDxEz;?wCbx-gSL9$9wlGKYi>;8z5d#05QJ=k-Iop;?KXk6 zW8TocN#m|vC--<;Z3p|Y;X54~yw)zQL1g91kx^BvLh7COUXfl&UX8udyiC2I_js36 z0D_Gk2C#lkT`m@sq9~f_v{W^VZ*k>!X_wj4GH*kVPDH*w$&ygBdhIs7)86P{X(Wiw zB>4gT%=9~-_cu5n8o$H`zT%FsPW2=>cx2PKI zEDiktZRvI>0X4h`Mow_~=SU;?)+E>wL&P}*Qy-iSO$kCx^?U|sq@7P6n^DaCRuVWT 
zw)bN@{CSI=UtAW`X;bMgEyH&9!~ank2`@<)4@~m_o{a=u6)P;&?bicE@qgO22fYnk zevzTyVPch}27%wLHSjiU$;018A+190UKbhbwexr#Th;b$px zG+f}ZqT7q&AYPl4Qrs>Cb7@DZXqfsc45lG0G7K?VeOMYXX{c1F8emzSD*_)F=T{>6 zPeD8GrC~M8yiEaHB-KfYL!W}pmV$IJqVeH2vso&)m-sASLjT&IyPy8ObSd%OoBP$D z&z;{ryLIz#`I$8LFM8##`O@jrc(rEr^ZC*VddIm*ene(Ir0;zEi!Ak(**+CH7&DW) z2mzSV-Dkp>Zcs~xbUts}*G8{ZTLF*qS_8qD-v)5RV8hsCpmPTy=@5a%U*iRJ4U+Z#DeTfDQFO3 z`|gYIAcDdnPtj`I2#a&^9Wq(hh4|Z={o?%-C%-+gnbsmvGgr;|Ys1Fpv)4=|(KLAi z_N;$6{V}OXzkNFDS9&^SLV7>a;{4bTNc4tH^xn~|&*+ifyvJf>$s?KNx>+oAiW05H zLZew5i#j^)uxp-}!Bx_Pkgu20U^Lc*Zo3}EH-rhO62_lc2t(&3vE)g#!7*kVpJ<^w z3_l@(ZB2)UJa;ZA9;j2gP6mcnuvMwowpYN29}a)*L++jWJ|n=DyM5K2UWuEAuh=cu zcV0j2)hoQmuruh%YD~OIt;@$;XD)sJlx&_n?;lvO0y-LdgtFLKRUtzW)V`837_b|B z8_XGZrI|-qMPPltjw(s|IuXSjUxf`YQ2h|=OK&+4&^1~PchRG}>XB9bp>tqe}v z+!YO||4p_>iSHc!Zb+ZGEB?AX=tzU?ksp(#o2O6OU;S{#7R9l8X*W{M^y$sLgLYI; zIJ@x7_e~p6lj+E)QzPiCiR+d5&Py#i?OUE-K$wp$B+`D;C-AX-H0xoQMXpA*`Q?g^ zMdiKCEMtikgror5E(*J8NzHTV=xjPFm%J7h0U!GZ`Ud$>F6TVlts6rHJz%vX%}s=BL)8cv=|+NnoXXRle_j=zoJjT9_2ti7yPb z=^X*ERAlTWzTX*8YJ38?`_+YQTc`TuEzF!VXHn626YeayN&mXEoMdE~&yc2{^xWKQ z#}$)z&Z;+tyxwDNdZh;RhqL8ZKbc8lpGFiC|8ibc7r2_Ac=YR;uz`msT7Nyo_&OYM zg`ZKm7B4JOLUa|id|hZ9)l>k|i*amdAxVXaAq_$RNk)cD!IP2}0;4%4BtHa3vl~mU zmcfHQ4vQuL7AZpoDL(<$mu|meN1P!js6@rF<;J{Hbt`#RQ>ZmbMj48MQ&*xwTl&&=CK>;C=4B>rE%5(eLvr*Ly|=x{#b|OQj7z zWcL{rXRp|yY4aTk_4*A@s?(xn?G_XMh0Goq3_3SL`dOaJhTR|hbo=OWdDLdfup)Rd z2@53a^J>Ub@uV;WR#$EZ;@!C^EMzQVW@m`MdCbp8-y_*m@}9Lo9%}#T^Vf?H$Za;Q zbGbrU{z($%W3Bi=|FF8Qia@v$=N}dWGtqd|BB$KYX_iF;AxZ$>;SP~0rB47uxB;gE zAWmrkeFDVMh2I7|#*vbbPn0C~@sxmkyw92=b-5On0MPC5#Fn8>FJn~dJz$d6d%$3q zu{x*}4K=YiKMLMbWuKqF{QRNx@a7l4h|Yh+#Rus5edxO#wSU9v{SrX^73|CQ7OC_qo&t2~~`-?sz2wq`cj+8tXqj}`dy?d--4zw_w-iH8LhAw$ht}|#JRp{S+cx*{i@ZxA`;sUPNaX+owLZ6 zUV(|7l1$O2-`Biee6?kLd4O-b(c&l0Idb{O(LLop*x3`^r*ww24ib@wDig_FKgYzc zDiB0fO#*l#gjK&*HQ-vst9n&NDE3v?Ei?t|h9As?nlrJ9aA=A57>GAek)mpcvdnY#Czkd9@zizF} zG9_1;H2n7=M-R;2*T2@A_C^cq)K6%ZR;5MBR@Btq}lJ-c7No)!jg7S_iU0SUHKB 
z!YD08^9JffVq1^Vv!hJ+;i<@3Nvp%EWt#;j&&EFDl=ZD9?yMMd*o0H(}NIIAAYZ8sTg`n#MHqgr5 z2qr^Jy0nbEBA4Zg8uaPC1^`s!PB$K75^6QMl{FI>=xTLmII-d3@5XGJwc#jPvaHX5 z`fpT7dwtO3XJfJ3$~9&Bx<4*Q)5HDadUtHNHojijmL2OPht(n8*Q>4jXP$;2G9Vi^ zV2HL#d39&C@+>x~%7`KLrzr?ln4=d9L~Hd94#c87lZNQhXgqFPQ%A5zb4ZE82NinB z8T4%C#Oyosk@_XRUy4c2A!oO4I(K&4<}=cbrr#}#Uj7H$7$@Qv66d;p=J{Vm;m3Y^ zd6t9Os&Lpe?hPbu5FSLCK&xIA*Suc5PaU(#6Fd1sM5t}ji3~1R$c?6WP8I`KxJax__ zlev7sh*7GAn_@1df6mKk+LFd65 zNH=5)H4VYl1y8rLxzQZza$-W)mnh5vni_{Ku%qD3jZiA9-dtrCEq;Z6&L@U>b8%%^*_tCR z$$*Q{jpu7-!(g!^BhF?etI&wL;%f{QdpfACB(_v_tf|TIOv9uw;B7-rhRGzxZMMkc zn)eoOUijpvY_fUDiY3x7^y!|NtM_$i;mW*l#B}x@v~zB&VE+2XgU2^WbCDKoE7-d8 z$57&X({IJmqEmFYIh>?#zZ66Uk+AWIhm~AIG-8J7IPhajrK}r&Go8-oQbP>f&(aP` z)ezLYZU#}QsgxC1AO+)b@5*T5ULRXKfHkc4DmXk9t4R)OSf@fnVoHlx`cn@5A+C9H zVnl`7jk=9Wx)T=hRqc_c&EIfr?Xv%)*KoG|-`01$WZo zGtE%}R0FfOaDLD?kP1j3#V15F)u9(ybETbKPwo4=UE1?~r@F2({dPv~=j_s7oH;Ic z78lp7nbM8Re^UCT(@Cn(r&ou#zWnCFk9V*9arQVVLw`Pb<`0H-QcC_ZZGwkX4tqX? z@Lk4UA&(r>X=qdnH845TkI__?chw>jhJb?3Bs);sqDS?rp5!_t8eu!!@r7rg)fPF` zJAo~cSGz8>KlIjb#60%aaptu z@np!U!zRC;3@x{KY|CZ`5n`{^&T*vGZS zaCe&%xVz1Ees`O1p4UjPrQY5KQfe0jFYKoP?;!@=vOa3PGvQAlD_-&2U~C}lg(u4>B*n`fT`U{ zPtrRxMC?u-g^>8`~gPH?!gg^dd}qvQS01ckF+Qt&*^B2gx?EJ zJ1CV|Hhj&vDMaM4GX32vm3-v~B%!B8j(LD%6pOge5G;_g$DEno@E zB0^JcPT)>vb#utDWnX1c+(o7=V*5GLA(Fo5)bZ_-P2A*KFz|y(AAUzzT_ji%>sqsW z(cE1kU!#abe}Zv`;;uRRecrgq)`n;ZbZSF<$sbWI8_bK*)ttz&T=0-4=2A2lzc^}Y@uD5wMEWtET>wjyjVt@3 zKBBkcqwc+m)xdgE*rXe@K&hI>1nO0o=*L6#=wZ?Io-s|WGqQY^)m}6%_NDVHN4f&Y z&=p^er2qwy(g+--@A>fPPH^h!fMb(C`Wmwu=~}aE$^31YRZ(b8H<{*w;sDHRFC~ck znXC_!W&hpV!oRZcPEWDB=l~O_)|9IT1_5lHLx)g38p%~*WJ@y|Qpy&q{M$N`Q+y<+ z-^h6ZWu;2ameOmkOCkPK(rGH`2ey8eMNSfLF#s9QZ)qRuq%D~&hGQ>QcCVRGVXn$p zu30o8G!Sg41K0Acd@>9ZUs`Nu_2&6HD=Lz$O!?x$Fjg6NR^>qt*1CY(tOoPY2j+v- z^v5E?foK|QT~Ler<)Pbi_Ryg9Vma!)f?ROq>?aqNQ*W{SI`Ocp75{&NodHLPOZorn zu0V_q`)11nVMD42la&PC!Kn_CM_{x;mRdt>S)$J(n0=W6R1)};BLTxmV8g&eVOm0; z1lSE-DWqb<-7fI$a$&p=5_O)sO`I_%)#bw;3T^|5+9;@5*7>DvLck;NDJjy8v+r-b 
zk~8`H*}oFMMcJf0{r>6FC-3Hbwd>>;AD;c7@W_X9op~E~Ps@6L$n}p#9<17${&w>I z$dkEwhj%Spu}mCB7Gm5duzH?gR=u(B5uYK$qAlGWgDoV(M3yFU)!pwDZ+?pfkdGY$ zY?v3jO&*t%XAPKEYJ~izz z{mSioHS{m|@w+jzEuza}lM*#&fz5W%QXW=Dl_B>|HA;XlZ!t$zpjt77IBFH0aWBuy zigo*yBBjTsEDSfcL&1P07CXSWvzqf9q|Ev^m-dsj}wr?Z8%Zum* zXA5G!b(x4hggS3sD-nE%bQGLFM7NkmgY&+u7RtS6gu8pqAQT1@qG@u2)i8Uk2G@Lt znAOPl9#eeMZ8biCitEFqa6hDQ4WI3||1a-fqlqL+kXV^vE*$F!y`us~LjEx0yy}#B zmZ>&*P44BB|NIK`dEn%!i%j^$mG!>(hHRqg1zYNgFkOf=L$4})^st<4of*c=VXG!^ zhUKWOikGwgj0Z4H43F@CQd@j0M{Jt&f>l*8%gL=LF37G;w^$nBWuVYWa6qK~A1<$D zRR4Q)P;}x^!{a#_s4b0JGKoPlv z$c?8#C%{VnuLfO$LFCQlMuYBy+s|nBJc>r(NVVq<*Id<}w{*=awdWE;rzTeW;fv^2 zgfLPG<~0KhETeu7k6POUCs&(GL&<;v!fDw@Y_jUEp3ti~R@I?{aF}b)2(Vx^$jR_$ zvS$!E>IkG}fSz?<#w4yQAAXOBh0#$Guh>TnygCv1{ z0x{NnyjGIf8&d-F@rFLr4K!--`{4Z;qMB+m2kW3!h%0qU6tg;eE9`PPsHO7RjrMJ0$op_KCE zTL2R<90(clre31V&|^Q|CcrYMh`NUr|3hy?JdGkb2rh(qi0v;jVeB^xt|77@{Jbe=CPt>R1m0J90Dl7AAKh~ZtiV5!6qVw=3L8X2;(Rq;S zU-Hlo_dhJP z6Z;IAeB~sXzVw{AXwgh{D@qGWtzM!2B-dHIphH#}{{?-r*JAeBs6mN6h76QH9MF^a z(`Kk`^|5br2CKe7x8nTj3|6VW^QK)~EnUg4g0BF^)KpK&Z8#MD$v)KhFziE`WnDxQ z(ww&S@7Jl`WHWj2GGAzmkj5xi(CkAjz+@jLB-Dag2nyy_q90+nBuH0!@wmM=3JE4wx zj}3UwzK9K2td3wOSF%6UBJ?L#5{s)*>>K*iYr+!?evvlue43d`fFYlTLto#aF1yKf zV6qUssh^Bej^DtmpPO1Ps~l~pTm#%F?aEP>O}XrHV8e!rGQ}1_Ni|Bz0PdYYhLJfl z#ce8*JO35k>Y^i&s#Qx0Tr_FS*0lROj^y?0m3QapfnV)=CeB{HVA0mY&!p0cL(+QA zbZl7s_Qc5A=N9cexHxg@=mGEcLzX_y76CKq8=S4#I@CAI)lvO{={O6mLo1!2;>B+u zH{I5uG{A0mevpPXa_CH-j%OUBvTqn5n;H7O(sm|?D3VqsAozblUpXn>Fx=j zwDTaw;n9Hl17vw*djP~n?*Hok0H?O4r*C2Nm#aFd)C29DFZwoa_tfEcf_VKK~%`%@GAq;KtNGruB@rwpqamC~bFAcxl#b?EoM zj2I${r}M=k1U2woOFV~KcMWPL2FqciuSyq&d{rj0)0e0SyuGMm(QZ8X4SY$i!y_vv zPJGJtF0rfqx~SdK)^3tk;1KQkv!r?XW3Y-syzvo+Y3t!Xq%XllF*^4*t1MpLT+Dfa zP`t9d=;XYzy&wwOdC_5dHNaEKdM`+US0zw$%SZag?mTR_i!JP~;|pOw7*=M^)7Dq^ z%Ei1kPh0y5DM9Dl+_xcZ1q7^A{pRLt`nS7D1i?Fl^AUXA!3QpqGDX4kt?D?tI$fF| zUCpm7T}_5uO;!6@dV?ZE5SOYhOjfKlcxPAe^>EJJD?>88YBHS((wwd8c_4ZufUnYB z91pH#oOp;}fWdKvTuBr!J7USk=83P68}L!VEuiO!K=&16(C&s}IizAlk)okBNlCqQ 
zXmG2v*2DMYd9l0$!`@D7o3iVow>)m`{Ny(p^q;pTpDml;r$L?m^Hz%f2&LADW#6kI zaq3V1H2Pb;8>=>FhpRH_helsNubh80=KG7E-K`)sukeiXDD}A93NZH(>Qbj^z9ADG znmCACRvHk;@v_@s3!%+$ui^QZ!ImmXUqbBlV2yx1!$J4!@LRq@E0d4N#Fw~_NI!EO zqm-iDftiWq(O`2%L%7LlKw)WVoqfs7>!;*2ar*lA=GfWUS|5GN%Bgo&&z7r+#aw*Z za-Q$m=rhoNlasjqz{I3=5;t-{;2sA6Gx`ilv+wQfrv3TXIsl|E^(iOo)Z-q_@!Q3P zx7_N83Er>&`Pcajyk6xh2xNEc>|6U3ui?)x6|O=%3j93uB)Ajh1GuaC3YcaQAS$HW ztEN@y2mk-uR{%Z9)rgK_UVxSs=89x_><#;Y-q#PQUDWM(P;2ty>`debq;*}Dy!IpM zs+y*jnlf_ObWzifEgkr|X`TYB<|zOjqaQx5CQpHWf~Mi7x`DvaT-P?xv;Tka6nJ3$ zFL?^W+*?S~)-OH*>&)Rhz*c-+J8%-TMUpns9oH^i>nG4xuP^w2^Ao(Z#(%HxUuy$( zrXt6PCTE2Ta}=AwOYm>%B+C#clz|EV#}T7S89sm1Q6^S7UIqdvwM4&ylk|bq4FSqQFG=Kqtvo7G)F#q+=XL2rX_RRn0Jp2J)7^%#^lXbei*ZL z;<7_lHHba*Iv_pB9+|kMpZf4l+cnQ-Uf(r&&F$k!hrXpVfRCUQ_DVHq>y%4O5kb+93o2e`drJG7Sv{lZ{V%ZGA@AYgSp30474d|8Q zc*U5sNo5npsscp)!wSu7Kp?z6ig4XS-(tAWz?|U5!Z1E@;#V`Sv!ZB-a7q2kTfLB$ z#&)^eBD7ud%Oo}Lc`!9ZD0xT4m@nsLJ_b(fiXhUQ5kA{V^9~eFVALbTCSG9D%-p(a z3GUa$Py(ZZrdTDei9l^L#Ko~GlGQH%KrQM976hgM??8@Sb*TSLW46?cE4k|L>BCre z_2PtVQrlRr`WE}R%(jt>9qQ4(<3)A^B|EI(M|3KpuFz5NZE3yt1xJJ?#!QE(ySF&y za~BX*Lc~VEi&LB~QKcj31f$Gd&qND%d?DOvF#H@iB4c*|L=lla6h$5;Ixe1@fvBXh z-v~D~D1V$9k6^&H*iyZq4ddLfuztN_l^S>JpFV?TavnUG#^U8Gs)HtT=6~;MJR+4^ zx{wBR%2$%lo+SxJ)d@E^tcWFX2 zSsmhP5(|qah_hLrZu1uHXVMk5k^0TglFJ5h{-idXkTSr|uv^%S9&51M63|wsD@>bT z2&wGn^3!oGSl_M5BcR)G=B+bdyC!`%T|a)!YNPRSVt;-rBRhNe~=qhS(I+9lsO z*)O{Ij%20DLTI+<(p87rh8qmWS*@%kOEblJY_Hl*-o1EK41YR#W0$oT0=G;0I>HLWZTnqZ@`ukJXU3#d)^jUJWo98Y&L(>+BxFK;8q29f!p7sNebm!A-hb= zwL(6PuAB00ly;@)bDY*6=x0FD1EXps7IP)n5DugK75yv_mox4QcIV6KRE4@W&>DLy z)VReY^(afQ=l1#PfZF+!Pe8qR96x2zf()ptv%9SyycsFSNz|R&c}(@3Id;Tsi6qYD zf?qu5VJ1;IVTmH~n2KhiN1}!$QpYxa68eL1yb`W!N+Ea*K%*SCSx!e%4h{W*2^zXGq7*C5p-~Ka zW3enM?P^zyJT%1`P|=sGxQiiuN9Jk>Kx9ldM0$czD?u%g0V$Lq?W44Eoo@MlZ}ycMV>z&9!VW7J+j)Ok|AAT4 z{Q)a_bx+osyyMB|Zp)^Wn^!*P3ebOh0{ zUn#0}A3&9;fe7H_QHt>wuoa%RgnsvE6M|0n(yGVlcXmw@1JlG3lbkh>63@O?qv?+) z3^Wr@V_hX(krf~K9F6PJPv^f`mo~5p>oWL1XncxcRw#Qt2&kN&TxCj$2%? 
zZMW;VGsVGbR7QquhAlNK8et~i zI_Udlq|gSkx&Q<92e9)Cw?r5)qsN*la=JJ`?E#_ogyT(lLKCs02}9(m+1EZl(O30{ zb`(tb!?9(Inj&4arXeSx2Bs&*#?>TjiPbuPqE0?mGiIUzjA$KabXKi|Mss5{6!|2f zFk)hHF&ff}R63Nt4aTQIsdGe|Hmnqnh4O?StdZxafmVPuLTkj9s@U84cXo8PdYSr5{pnDK5DwLciYOza~CeE z>$vjysc92gR_-+;xA=$=7=nIEjD?$h zF|oO^F%4p%D6Nm#ha06`3^^BLvgm#`?knqK5aKd~V3Z;}cbF7xL+Od5s?AY7&ruy< ztR7lDwmQIAp=xzH0z_;o5*S@jDhv`E;Tfj&N+WQ(p(x&5vq1fdg?@T(SknD}uGCoH zvwfApGv8yO%i8Dcnl>4A^A9u-N@Y6tXDMr@znxXC?uYYs%b5uS%XW)d2{_j5-C$7f z6v-bDe9o*s@P6x^vwTr|83`UGYhFv)<^apURiB&7rHhVrIteO%TGv@WV4WY5miGA_ z_XFhG`StaKo9C9_V?0u_@rn{lFK78><9SlT4JFM;#xs^)5sxy-qL+HqqH7j<3-De8 zzGyDMyAAlt_x{~8E5E3ePHnR&kp0-*q1Mzmt9rQJoOW`M>O>F zD zp3jXf*??{ymn^`OIYy-$oh1%8yUTD(hjbToCc)m*mdVJo$D zR`U9U)jg%ITXoh;{L<$vZ)U4|ZL2-2aOmCxao-P0?p@d)rJ>K}Sw4m~+d@J66d5ihKGi-8kJ`a4P!M*w~tlr zW7VJ;1@FEq9iBgd4f8HD??o?&ORNRae3>D$!xw824H@aCptZ>{$!%a#1DFRNG>Ekr z>>3apuSayXf8mQ^N_2%!$_;H>FTyuCqEiL+Vn20SCM!OBwm52N|G>UQM&{3lgc3m` zqvU%YwSf&4Fsrd}mK3%BA(O*29D>L8NGF310!tF5Ja7jZQbbU%OS*|M;X9g4DSx>B zNCAOaiPr6W+PB~vgFJ6M=^Fa3~}-&NF}9*jv@-+KMZVuHtx3sk@RY3CF5y8qa*fvo%= z*j@L6Okn#_Xw)%64@J`YO{1fTK26Q!XyOp3-Qf+;GHg^JgKJKdLDd40pbXETWT8Hd z+RJDo%(Z4zv{Mlcr+CI2-=YT^R|R5=-5(rFJ9Yo3<7vkph-Kz&-8wIA_O|V_(!?q^ zS;fGSBLmgzKa{$qUh^M4CXmJapf(A5np z)ukE=#ao*l4UY*#8?FspC&x7govZ1{j4~~)`huEyj+ziad5}RlRSNqflUTqKYQ*N$HauC==4hDbXc+5gmgi_jvVqiY z-fc(ojm1-qdI0I(E^m@vB~2Y<{LqdA(S1Oas#5}k7)KsxD)JAxm)RA#&Z!5Rc<Qd)py#JzoVn|$ zWpgyib2N!{w9Ip~ds#PIdw>E@9eQ^ToY+XyJ~Xto1dsZ zpSjh$fB)V$KVkl7ZYAwlr2e#Y*XDUoU(8;&HJg=Lx{D3z(ynsEmeRw=( zEIEEiNzgL)-g-QN_qMz+SFvjL2JD_F7u^5vy|`q}#N?sAiY76Y$J{YPN9OYM!w7x> zq5tI{)nMqt;{@NR_;Ads0H^y89^d>3@u{fh!aMz+Jiotn>^O2HS(n`Z3t?b>l;3^UN&r@xc?n{BZIubusHvf{kJk ztBKvKmMrW`t*ut;KSODnu6Ac6fa1iN zX~*u4zA{U>H+s|LHAnWZ>ZC4Wy?j~B!$>rpxv?{IS%M1Q!E6j)>Yk1iWea>Cu@GrstvP~0_(0h-j8nkZ0U&@#WCWbJ$W*PB zpbj1~wb#;^_H(k5D-QXil}Et1VWqc(cZumDt?)qazMVcVy(-StxgGCK)!!>|=b=FCAtoUP>%U#QKAUT)S~Y#%biwm^n0lHW zwd}*UmV#yyFEkd~3B80NLY4p->F5;e=ojmFLo@Jnzow}ld^4V4-OQHrmVpdm)O6wo zeUNm3+qc 
zYhI@aE@GLYtgX83#n__ie%3=;BeI4~(K8mZ;Y2eQUCDPzqcc)J-zAMt*Da>*L0EHB zG1gJtp!yccOl{Icc$eK4(zs$cYsz+O}N;HSyL-k|JKx+W< zI=@9P>=*W3W^Y`chK2gX8hs=fyE(RYjva+?j1MXYUfXHnN zeRp~WkP^uIPy=I3!5Y{UhDa0^3x_nEOacDZAS;@)MDYd}w1I?}1XeAR{dSjmPRnXD zcfgbs8Hprd`@wR1suxJh<@1i0r0oYM$=$-tzsTb5!Karld2hMh`a;mcJjc zcZ7n0o}|4(!_mExCWDjft-3Jcr=chE>NcR_Wut~}N-=&zc9PNY0V~j8Bw<4~Z7007 zTU-!WNeLK$kfYH1N$14Af-o;doH)Ggu-@ao8<&>qG4{JtH?^6Ll{UQFuf_1UQ#y(z z)Thg4Hkn?3jq@wZ*DLp^U#l-oJ7mIR5c3mFhWf*u@yTmc-4v;~NjF_vBWTvi!S*zBt2R)ZCOb4|kv{tE(^V``k zm~G(uL&N4QlEf<~=Gw(INDo@~^hWZjaelo*_srgL*!c`g1m6l$*ILoQ1AOai@GYy@ z0Udei`CxUeM+n{$E&QrIZ@~mcJpYk8*WyF(7vZC!_sf=oU-A5AWRuk7@BdwUUV_L6 zH1-42uHyavdi98#a}pcnZZrMx+=xN1==@D7?#=tZxJo6p@<^>-(&%hbm^O?PY7Te1 zRIBbKq74P$LI3ei6o@*?%hh)5*EMRZUdanHGZ!ZJXA=;TXy~+w4)rc8<7|&dLs%*G zM{yageG}H+0DJNmGBxrr(LDDa=!dB*_{ecN*+$;9-<}3bx*Qyx!&%f5s`!W4%{o4iFKb)6|Mm|v2S`h03?dUYpnbXb|&epvfmoLf*x_DGpyMcJiY>$_ zwUbs&)P?AvHW0Rs8Yf;DZo{NVoFsrRol4Z=! z35>?np&*ls9!T;kp+R^r-`Zjn8(Xz85>gYB+=y|FmEf|&V-_I(y4CC8{y$$m@B2n# zBwiOIw0h~Qg^Ysx zmX+``R1gvn!>ss5ZGvV+>QZT7pgOk+qqwgAfQtnkv31Qu5#=!Y2DD&;&()ZEX%31Y zfNSci0vf1N$ciYiTeI*XjkSlyh^8tu_^?)7kmji%70cC>7OGc(ufJ#wL>768AUNH5 zt8R|~%gQfyEZCE){wn|p=YGnX_xwUt5(dqjd`7T z^ym|4q2ti?W)if}Xz;;>id-|EAONj7#sxzY-IW0h%jj6~IvAF5!H{hILO78AWl*UJ zhS7#s0YgJeee#D_gDlcOr}fe!pES*zAizayEiB2sk790~eP0XiAW~lgAm^j%%r_DFR;8IiIgr9+p*A_)zbFll|qORUJ?FSfS2q~-^; zeXwxIsiy_P<$jaCdaE@1V(QttlNsx??9&6Qh|4B`o1WoZ7U?9UiAX0nm#wC*<+-!b z@E7ps#VZDB~n}f?7-ngjYLf8g~ce414&1A z6Id*j^za@lEJq#17G$!W>N~vm&BzxYT3RJDmi!_Vyj7I`;$#9Thp4`Wg)Yqq~=Rbdn*S}%wvBu@FNzP*Jv_=oZ8cmhBrnG;ZniN|gB^G6( zS7ETJ6A40mh=5V=@NoCK;<3oW^-5>$$;m079k#QK_fk@Oqi!&&=#H%(r#@%?>ire- zWfrC0E*6dgLS%mehKm40ig;QK5l?OwzmiGniQedz!4sb>rcQa)s*}=74ynQhg4;4a zKlb;}QU*E)oKYZSHB<}YT$x(8XbHFI)C{UZPZ#iM zX`lMDdTjZ+K?4WRf3{R*W%HZ;bLZ}s=+#btsp0LDM}7!^tETLGHf34LS}~2ejUhhYi{xc3^ajq(hS zHD%I4RS(zqYB2GYQrf$bQ)@^KAs$NXl5aMMn$&=4(tuA2D`Bg!_rvj`_|cqOnW>|b zriL(K?3^RH~^x43SbY3k#dhyBIbmiPT6e@8VQBQ1NtOyldWuiS3!p0ryU|CUT%lC6?xTdU76PGsp*!s$s93 
z27qz!mB}XaIh@62X6_RsmN;)R8>=|-;(WI1&`-Bj=xEN7Vzv`q^{=bHt$98x>Cxx6 z9}_p(q&}5?_aa>BQe*>DJqD@UPDiVs| z&#f(ZW6WwYuTAkV8rFPwVzn!G2W%Z4K69BQUOs;$J$(k-wOoA1|A$ipKOE!VyUebs zTXHn>i`@n%Ee~HJI*5XvZW70Y53yyz6+B-V-oo~rlRnX&mm0xK$b=(m8~HxoAAr8O z-k{u9a#EAq%q^z~KHR@ZHE7I_vjkT+)!cpTIF$+VeyXu(s+!vl*|*5Wfs$)f2%zK| z5=4phk__ikuKSozUUS)XIL*%dG|Bh-Ha|^n z*TLu8fIt6%`mx6be)`q5Q?|UvpWgwjAP-Tr@YIDBt8x0~3GIUvPi;aw`V#3GN1Qly zS>Vq7E2_=kED91a?H4^cc~T$_*^oa(inO@^%{7)i61UJ@Y}&AP44Tk}wC~oZvYUss z*Vl%$TTZ(Nvzx}XKYvdj)6RsE&}V6OTL^rr7DHES6BBYLyyDCZMmh8Yvg8$u$n#q< zKG3FBoS9ogIgrg~*pw1tAtgl9#%S~kbwz%;U8gkE87&PG)n<2pDiAJBxtf-FZ1K8- zyOjy(p4-p)xAeo+6SoH)pE5sp`}~~UEQ#=Wq&k-OQVK!0D}5)f?mv7pK6;!y$EDCOi?n zosT{5eKz5vq?8kr=N{U=fOYmtA?z&)-ac;8()oJ|I1bp@j{#i}e?kquH!FN`}r-)bUx^`#rC@@+S+CZD}#rwSxv4l?O z$Fpc)!%#n6VpJxr#_cW>7$4?^IkV7kjb2(H_DgL=QGFDF7E$FAz@s8OY=J@2(oD7V zcWOQpZn2^2ift#+$@wjD;PdwA-+hNUON*I1)bI9*Sle6hhZVuvsP?m$tICbC!}Z!v zPFOmXx#gi%>+(f{$Q1g9imli*LW2x3AR#28wnE~eX!n&+D+V(S(C;e}!)0IJfxbN|{M=<*NR}^L_C0#S=Y>o;p7<0LPfD012;J|1rihkd10vIhE6c93?|*tiEa9t|HubfoIoze}Bk zD$9{SJ)b@G221WTdzUOV8y{3nlAj)1y4qvu3~|#Cwqyw_F<3lxIO~IoP1N5z4zP=V ze7gI7x1*E(Q7_9|x3pQXlDJUSf*)k+bW|Q(D2C_wX!{H!cZx1mbz9Mt z1-ilbU}N}?oFrunb0n1L07{5P4yS&UF68Aqij0h*ty2zj4BxvI$oy%OR#Rq6a_fwc zilTgV@03gr^>gu?aWj@qRiAh;zhhUA<%nOlt}h-~zkWl9_3bV^mPci^Ju*;A>egqR zI6&O8@cow64`P3b>gUo8ME=VOiSDYvQkwo}I!f;P;71wt-ydZe#5|4Do%tEs&}q(n zhJj!vapc8_sg)2(ZjI>3bGbQ3a*nD$vd|y@nUS8ACCOu2Hf`BMuHAI+YH?Arw2YNn zm&`7l8GE2o*mm_=MgMtScQmYNpV>4gefDbXXoPxIT8|S8#g0P5!U0!-0^)9sO+U1f zIKh&9N1nWCFf)fhA;=9zzN_i2qa=jBX-6?}V2n}W35ew)I*M$L!rcHF!_SI;&}cRF z12jNB{l&@SDXkl~=x%8d&QvG6lI-#EXCBGxmwnK<`i0^1dhMuNbNs}}i32YZ-Zd_G zWNiofiomo+floNl@QzzbI(?b^!20+X8Xjj5>f~uR`c^|tM!!boG@xJI{pscUU&1PE69EI<{!3;^1D3w##CZ^iq{X`SB-fH;HI1y;VJ_ zem7Ib9D`BJhZPqOzSuE&27CDB?jKp=`4psn-#kEZyEtTx`r?$rXn%&bm~kdo<6+oI_s?1@2KkGT;OIRxWGWF`>m3XDb>AiNvZ-Ud7SQN z{QFBW$e z&&(8mM*`sH2~))fSGWDFpPwfi#~M46V2E@U;f!L!GRVeKILp$)az)_z`x>sAa`xTX z7(|F%fzxH8n-Hy9;iwgshc(NJZMCyfHa{-8S=MYTW%FD2;rZNHYXiD@Y%Rc(CS(A8 
z8f+}ZPp|CM_sNqWjVBKdD|K{!hzg>nX)L8P&RlBhI`c)VJ=h~P00v3`d*lq5Fa^iG z{yW!k%O4!U&Syc}UIY((82VuuvDm~qb*+Zvozbff)y9zLj3GaoptDc|a?H$vuOi3_ zm#&uCr^z`|X=L%>#sXpfd?swXG`akYO(&l`Il1KnS&APY=FepHS-=JsvX6P}V<8*f z>fcoT{wMXW`n{-ZT6Jk@cC$;5vFfn}kEGM^#)JyZl@cxvNvC2m`1Kg85xk1*&5Jfs zeq-`RMAb^D34M#ytq49|37#;%b}i97P<;EDC3s9}CZFDOS$#J0;`GDsMD$&-BDZ?= z{_Ul&$B$a}&dMFE+Ox9DpU-X3_l>_Vl$lV+^IPwSymgiCdbju9iA1@a`PlMU^)WUDL^K|otNvwX8B zOqId;B_>Z7f6jBH_EgWSyE)3#?^lFyVEsI+6|R zHE9``@Yt$BOulho>1I#q%cLadv?(*igL_t-XRGGbyY`37SGf>gxO>3Xib94G#Wh%s zPE8C+w+&6YjE4``OQVChVHgcJRWuk*UtDtAMfZ&c5gIg8#WyCJI96%x-B582)scWa zcfRp{ypdrLp4uqOay%JIpM^_G0fynSB9AT|`Z;52!5R$VxVUn~l$9)$v4u}&W&E)7 zlUsS8o;Y^(TQ+mSw%qv>KYI7hM>Cb^iu;Bi|1$Z5u~WCNo8;s5Q#RW)_5)#*e|IzWR_l!x$Tc{(&Ue()lk{7Z10qO+@IC*C?R+Xeu}3TB`V{Gcmc4C#8ST?9xbR@>(`b&q_j33CE~`- znlz%vGqgd8*;(MGi^(1*sAYilinQZ@v+^LTqkZO^oB+OvK;qD6Nptxt!6xTP%8u#6fZXN+{v z6m^a#i#c2lSKHgw*6g62IixT0W2G+!vvKMi`t{Dp)2ohO_tWzdxQeF#y6Fc^ZhZdh zdX}DB2Ga8NfU{A-Cau+|#v$N`fpNZ6FHdz{Jno~f*R^H}v&q%fX6cxwxP$)E*-^C_ z{`#>ENz(8X+CWPFBHF_uv9xHBHY2wpNRf)sq-cSsr#ZYUb^9vh+wN2{1(}7ibY2M* zzbLs=$7C_NQ|Dyyw{`>jw=LhHUt$Mf!M1{b#7Dv!aEua2&_aed+sCm3& T&&O!b z)9X?Ku!IRc#W7MW*VIHu_l+Y5I`2u=YijJvVIVQT2I5nbb)d>8-je;5O zkF3eOt{_^di5%_rLTBMaMeOnp_TvzFsx__^c&G7rer?IGoq&rScp5iBpI3Y9$H(3J zq=-oEuaS9_VT=}B6$`Vp$$eDa@DK?IZ{#_u zQ{k5*(w5WQ@DduFv@LuH4Ag+mYpE!f4L#gdgr%i$yH3O0E6<|I4_CO`{F+}33zFX6 zn9(^sEHblG#-{a|ohL+w**j%yb{5_E&&Ktc8C#^zN!{K_N_nSCGP{w~txHn!JDpQr z)J*BxC23ISPJg4xF5g+S4kc zm255WVxSueG~-#^1Li+iy}@DziZ#(WzzG1?0H~2D zE)NI|hz*c>=aev1bim*UGsNjolUObfS>g^}eH0F@64?OoT)~izL6SHBa~Cd9x&|t- zP=F-&?RluVgHnJYk6kOHtQ&us-bOyL)>K8dS7T z>yE>_@91&u(_gOS%v_SQ`dW`2&{iVBBoLYBJw77Aqbdj|lrmZ^RvA8`7gta)u2KA| z`!0F=1|3``Wl+x?;8KU<2RTe^MYt&n@#dfBKijUj`e-zRt*Rz}}y(-V|%0UC21hFFM@Wo}0V% zp7fP77E{*~eY59^mk(c-orf%rlLuIU`bZtSXZ@xkm4FO^B$w zNfb|QS|w#10`a)E1*6*FwxbxOu_qab+MDYWi1Pt=p<4vvgPb-XIWb)f_kp6gk3J=M z4{#Ol!Afx-=#Bf){C;_UAKfMJZB_D=YT7#Rphlh&uRXvw)yh+9^9Q7Bq3Q;BY{eh! 
zgK+%0*nxzuCVn`Zgm4T0i;0vHB7=gW=)&S+Vyx00R&C(Kp=*?Ccm5fZF+#mHa{RbI zzN@x(#n6e#EN+lpt+qQ>{af6=tBU$u{p_zQtIvf@35gW91WztiL7g+gHzr0p&3;z> zlW!(D?}?@9FX?77J;Q#^|KcY7Q|nHdbo*gi`a{XP;|=PryBcRW5tdO=@RHGrr&db> z80fVbTmtAgZb}W%%}35LxIPGukz@48S$w%PLwy|1%4C{rso2FKvrreOCDQO_xr{Va zDg%zAa!zr+W1EyFjhe2mO=Bz4SfAOWr8KtPA=f#5Ufs>QF3o27m8m@1_C2sS0Az%N1A1xdk2&}k_t$Vh<$MIshQf7B4qHQTG0ykqlZc^ZSv2zq>As~0E5<5^z8m?=y&hX2 zI7vcSXwFy&uNtc?oKZrmR3)lJTTQTOT!G)XR&voGKTRDrdd(0FUSsZ};vLQ7HmDHs z09nLhpfgffc;^+^oE2>q!zxy*2v`^`5C~mEKNwJvF%U#VRUE>An5~OMB~GGvhBwe6 zz~Mb4RBR}P2D(6ZaKQ|{aJch;yz!tlk8oBq2B51I6^`%;qkAkV<1GHM-}c?BabmNl z$HtEas7u*cZ)ddlttUd4`+8kgv&3BITWsoARKO%PP1>FG*YuG|6{}W>NqWk5R;|3d z{KMI@(>YfrEeZMa(g6iWDUv8WD}&S_$FO67}l%5+}pg7khkx{k*{WoNb<$ zTf%uH9DT!-HP<7g4we=J9E10L;yhSXOzFn5GY4lZwhX3SRWA5TqN#jk(J@Lk%rED; z&^kuw{&~gEm%Qu_zgX&I6BK(;2(kr*1wmCZlr2*;4o$m4kr&u5M7}rA>NG~@K|)2) z4uYO1vkz!c3|>6bFfOI6g^X;RI?Zg1L{2i6v zzJ`ApPw?;JBPLl!eyU#D>Mic*HLlmDUSoT)@#@h*V+U;-GLHPX4BZp3m%=RRICu=I3sC6If~cZ!r#IGMQ-?YKW2h(ytw==&V)1=fuj7YnA>B>J#vIFz;B`Hw->gnZ+Fa-t$ zUI4!aU1L5RRcwU{S?Iz=Q7?Ch-DI!pJ^>|A%Z_-d@K2_N5*_1;1M-2}sJKt4PpnS^ zACR!oK=;F|ym1l2G3L@bGSgkus1*$&$1Wf|$~7+_T)d$!9RC^qvEP1VQvOfsOg2JY z$j0qGadeBC!8)OiwA0|FgT%ide)`$>Y^ig{>SYcYybGDymTBDjjRMUj053~v8SkZR z4k6EhD|i>KwZOa34g_r}=rG(y4Lyj2Y8FY>u9wXrfqTR%MZLIb3(vf*&@d&7Hn8v^ z!tp%h3GENML1`4(BLBVcWe2RpO5phPA> zM)%Js)}mZGqi)@(Msbc~B4+Sn&@XY36RklCNEGdK0^=m)^>d5e zS3ItKG%jn(XmQq<C%n zVQa$PO;H~L19G5dZn2zzb!$V`Y>co)iPAdgK=#5ni!0CuKf)UA8T z!x~-`9y|~O6bx!XJMK8^BIho#i*s%cE2r#TveYqbR7&EQQJ;-euTFNhwNDlgjAJ?d z#U3k<9$BBV{OFN2@~Fv^#tj}Zb^7QRD_!G-QU3y7FjQJD*9I>rQdk9O+OCNrcnTl@ zp|AjM93zKO@FH=pNPMO*AgXvWE#l8P0)a*Mno(jtICN1isHt|2L9M0j`1v7uoR^}Z zTY~iaikd50_1v(bM@yBhZ?&Gyv?Rbqw_WWnmQ*{kz0UjWs#=*{ZHBckg^somktym0 zmZXSUFEEkBg(g!?-bOboE2TBfC>5JyD?A4%@i~5l50wCSXN|zZXOVG1PA3}pOYT4T z|H5f-PXfaLpK#x3+3(QN_x_l<@SkbZ?@p7PLfWd+yVZru$BU&$_ZhX8vBlXaqL}-1 zVSV+3;p3%>`3lKF8I^3!Lmj`l&0VZaRzRe2){tATE%iwQCLYfZ980k0{Yde z!=9Q{@TW&hORf+jh6=UCCG;7#)%BSJg`d$A0QhI@_)M&27d{g%6hRp!eP+7rGiwSz 
zqbGpz&kV)d?pSu?Gm%0SaXJ4?nCmm`3O`f0Hk`{4#5rqNp5jz0qAOQ`mi+_`mikt? zIEWtIQiP%uVIy-4bYf&8Q1=a)*-(Iuo&A73P~1d>;*krIfWr{{z&l7CrdpR;K{^u! z8$r5u!%?T2`QF;!VMd#vD(UO;nLo??9wQu|jk~sZ(9V; z(^~DQj%)Uz`sEf@_UDod?`0<6JQ$q&kX8J%YU(^}UD#VtR9+04vBKJ?&kw zvL{V8s+9I5a%K3+>0$@59c5E_7D(;mY zASNUs0}%d2V|+*BK+=Q!kEABz-eg)|Lud;WPw<9a&bP|*)pT~~R_d6AWTWCOaox^4 z)~KUv?dEu=;rKZAqkjpuGE?>G_jKjsC+*wI$50P4iRN~t!DIcH=lxekR4P-k)^9+# z0pU@OEBNt7x+cL41`f?0nqP@n#ma-tN;j_6+wg>oMv;@<0;V!>0_gM?{8qnDPj~3@`4Jv<3w67vimt)4!U&=2^#1f3NyX z{WJag^fiBX>hyfgv}<{BXF}wTPqvX>31y?`+{OW?g7R9E2Hs<~p75&cKbe{D|zcljJiJ*!hAKiX&9`8$MHG|R+j24KN35d?PR>#_mQK#1 zRmP#TAF4e3E-=8L!~=tki48z;!#WVzA$1?`gntF8Ww0|tV@Xj9r=k^8H#-NXQ2yM)Z+&xDC>8i8V#3M-}S z@*#N2aTtE)L@}T&j1@$PafLL5(Pk{%0GNuC;3P7XViF(^{h$!|1;IECha2d+^YMxn zldmw_KD80cU)y5(ef3)hyUI4He^dl1og4V1iwqg|`-0@~Ta(qybZE~0__13H zu+w!E4laH1+*PSRFxih>An4?*tKXa(>yp#~AGxU*k`tQIhy+Aju~Fz-@EN&@+?V^z z!vUOoxw#+inz6ut{KUVVPO+)oE`61ly65~nsm6Ego+FNdr9 zU=e`Gb-vD_99D)i=8&o&Y*hRWk+auBh#N*aP+pI(dj1{6q^#~ev@0yIDsH(2Iu-V2;DU{OBO zeSrJ0IZ}Z^<&nMUZ^-aAk*=xon;*Ommq=_lQb!gODBM&L^IwH~$gN<=2PCl(0(S#R z4r5>kt|A1$xOx)ca?ria^ zMAoWAsfi7u1m6AH!SAB4#u>dLAO&w)@WZ8=-OetVz0yO9*LMwFUdj~c3eS}2F_#x=? 
zGfo(f%$zC03}KG2fW1&`*$W9kHZCo31rp&}jbH2XYkhug&abWcwLQOf<=3A4+Mizs z@M{XczRRy8_%)4RGx#-&U#IcwEPkB_Eux-=GtF`S&B?zov6WOw$rz7I+7w)}>fqvC zuQ{z0fdh%0aSph8i&4)gO^s4gd7_RZHCEY|%0?dSo0^)Ml`8da=yfUeR_f!_f>gP; zQh---i>HRBilZ8O(d*f%>r>@XIqeIZHYx$_Q~A%pPg(dZ^wFFs!$VLT0?-w(2%D>u za81Lj^=vtHeaP(*{#D2y-5vF8j-h!DfCcaYFzebJ!}1)1@*D&6DCwn+&Cw{&(cG5P z!tjy4c@6*;5C*`T+Z_G!9KG@!-SQl5@<1Anx2gS5P6xv;qQ}5E@4{b9vpE3a@ixcw zJV%Dj0XR>zIcDcMvTTldd5$SI$9&=wS_TbA-M}C}UoZu#Nx&r%9q_rF)tru)WUcw4 z;{3T=(icsRC`|#31r=V1xT8fZsD%ZOM^>ook6Wa!`+oE)+F&u=*^66L%V`mM z+)A}34@{mUP8^gpNo?|A;;7CwYUZigts6CL-MVqZR??APgFAM6xA&k5wVs_GdD>F` zZ0bMnviQ@MpINQc{9Y4N5-+eoXHOR3?1g^a-i7x+>^-hd-QM}XkL3TX5kL$4V^^QVui~`e8j(>(mL3nb~^M^0`@YH5=CKTenfKw|ee$z9oCF zqu#2h(GyooVc+L#u!C(_o#Cw-wC&z%*n-UDr11$~ zV2u+Ys`I2kel!5iUQ_#4Def(jqLM2rEvw;(*g z5HRuLoF&tyE)y5en7Mdx_im0J-S)BO@9%hjp-AEvFNvjW^b0~J_y{vv0#me3OUA>*OF=FNH3F}sf#Ts?|X1C+f zKXfU`{cHKF3 z+V)LzXKfB&F=pK2#rCmFB}@yAieAxwnZvPc;PR-*IXxHc+3ReRJY~|L!I=}pxxF*T z4d^!xHaq3`O55c5gx?CYQYpD+iE}j9VWFcr+%U%Yxgxmm8}>Mb8>DT{s}scX&a%<~ zxy6gaa&!t|QwE^FOBpWww+F*3qxbu($w`Wc3a!+e(Vg z|63f>36SmrNG}2HmKWA2AqFdn+W|VH-PO&kIHx#7lO9q7jBJLl2Bj7@!%(O(hEkd? zhw0J^7g#(-4}s$2`VteJDglfd#VVm8Jf{>j(Gi7*L;Hg4EEFwShvP5P8D0%?wB%me80J3z{SWqjwxz$iUfkM)?Hjhb`QWrU#i6j+}vA-zZ)wt{eXFz{m; z;6WKhhu~za(aO zezLwKJ(!qdNK!d177`|2-2^GX1!lmIdY_Pmm?Z6El2lZ1ilkfn9ApTl5t(ou$<7Y`6G5@xF@q zeqVgQkI?BA-_LaxeRKNv7j7F03^0R!O!3feB!M#sZH6I0&%L`OP_T#)_Rd1MR`+CM zxXqdcY?shHJuF?zyvNN5AkECIZO28 zaUy)bEOoM4eXLr2%9q``N{3&J=HEW6;5Q_!%tzjQJw-Nj7c)s%6FBq(GCIhxSJzw` zBsOvFBf(b|e*5gguhO`E%a`qC^8EZ?4}N(mS1OhN>zp+!=STtAStVfkpZIMnG%SAH z+=Z({TE}t+oUsN(N7xO&#>1J=9{yC{NmIK@=jG@ZUvu4o39UihS3up-==NaaoyxQ? 
zH%TP-OkAlnX=;!)eC93GE@MChG;Z*6DdGIRltW{1=U_24qje{YyZ^tzRin3Ok$Oy1 zSMKce^GC;?*muvLyKNEcw5QweC#QXtVgG2;f?X`U`>>w9GCI%P`N^u0vqtpjH29r9 zQ@hRE)@A+hw3#EpwZOXMKtQV)bkZS;g?qlCT)A~3elGt49-*nOR46u?cbT!p<~Fdk zvqAhC;HvoWVcZVHA`P0i4r;Q1FsTb!d+yYzk?OiMw)WiJd81j)NzQaucgmWL3)SnC zAR*8HaLedjVQPrCSZBtNu`^Jeh8SHzoLmp*QUW$;OU0Ad(Nio!pC@IK&K%>&a)F#R zEC=s@No~Wbc$;?~dq)nc5M(vE59E3iMts+yplPd{>hG-B&97NekF;}pcAoom_r5b? zk@x>-w7=Cw4DooD`X{TP{_wcOXZP=4PW|lRgD zwDjQg(NAGqAJu-?#K|~Q(Sf6LGXBGEjHAWsDf|lKM4@Pd>e1dn6M0@pU5@ni17r0g z$(O)Ru~X~QeAG93sC9hOu%wdG{THLzO))fImi}};P$RJ}zCvj%?paXn!hiFXBx@u} zq%Kgj3ELJ2I-}B(lJxHZCCx|HluMev9=2{KYTA{+r%R(VI&Y!np*`~EH^59Q0os=Y z2R8WOuC%mm?33lr0vHL( zUQLCbJ#LMN$I?IBe&mvU;pCCC0mF~io!Y-CJ#FqN{0g$^SOxrwB$Uy<-gJ&S0&%BF z9IIfr`Gbf?`Ao1wvQ`A0h6!nkkD+Ubi9U4{REqm>1*b@H*cLC*Dmb!%R+TXR zPDdCzPlRQKq4PxXFqkb3!rI}!nK?#zSYho^0tpzSK8@X_lO|9#tz0qY?U_gKst=gw z_op9~ObgsQZ}*wYN(#y|4zUw+YeqL>(SJ{T!oogye?sP+^Ha|JQoRZ*ljwpi_W^hb zjsF8KD_)w!@iH?*?8uO8rOOx~dN}%998Z&qIERXj!~o|bd47u81+Z*g;3r+NjE2TA zTJft)juP!hO)}k`4oSEk0cSpM=Ol%40dMn)z^CB3F{OK$m3SqZ_iv(14@0sJmZuAN zs)B~w#a;QYg=4h7vuF}PSnQJWSKm+CbD>boU*4N^>_?NBpS0Ecs~fgca|%WMfEeG+AG2FC%Nh`SunRSg);-3O5rlL|r+;Bz%e^6DvmkiWvaeoGJO z*iqT@`0hRX?GMt%d&q2^7})d=bh!1)o87c)#)Y5Ge8fWD-2`u}EGA`qO`S@(cE67K(A0dYM&KFWa>nbSK#}g_|*>y0VV%6coJEsjIj^ z$NY)dfdvy%7)n8PTzn-nCw0o8Wa+3bO>{0!6Yp`Ybv|UeuH#Fv!0IU$-mif6Ai8Aw zE7=`-ACh%$PIU{%j;ydX!sSXYeqoo!hLphkgD_1h&izhENAD|9M-8d3gkCq<(t7tz@7z!+RsvtTBr$Wto0oJs zn5t$M9F8OCKj|UTFy{j?_{wXPk$~jX*Qq1GI~~HF$RV=o{UCsMuY{~w3RpRq5$GrT zjSRU}Qvg_YX|noWsgm-17;7JN?M9Gofy-OK49X7j5e6xiBHU?*a|C2@)qP{HlsrZO zgLFb3q(FC}Q0xE4-FwGZRcwF5d-iE5q<3l{kRXUr1Ja9tpaLSG(g`SpB1jDhMT%0S zL_h?TlF$N(0^tZqk)k39qM+DCt^$e*RuB<$_IbW*&7OVs3Gv?hd*0`H{&?^G2<&~< zS!ZU=%<5}RCfWxDontujZOJJRMxdJm|537*qhsmc4p%4NGJR+#`d5y4J4Y;mPgQDU zbuc=Q)(VoJm|{6Xv0OPwjKr25V~Ib=k9=Qto@1f8kD(2K0{ihB6|_!XcLX_It0QpuBuEDnEh2Y4^@!+OR0ZeEt-3yqLe*R{wp!FRYX)`wZzQC zmWiDcvlHEY?I(^>3c*s5Ob73?%~+(Ol*-i62lfrdpD3?U0N+X1qH98q&^6zvJ3te? 
zBoqqjKmH`DPWx?n-WB5qQTgXQ@%%@J4t!Gj=tl<+eC({{EsHQ__{aHnm#U$`DkjZAr2_*AAru5(!on2D`Y zvjm(U!$nf2txF@4ohW~aiWY03)Xa3X!k>@S`36~tnIQTMsYVxO>y+cDzq53feq`G# zN9U|C`u#S1>V$6=eI=sCkNv~Q9@p@v!&wKu|L#cjx2`oEH>jQOP_1QGW*MLTUg=e% zv?KR`t$`}+aL_*qUBf-LytE(=VVoDrMPpF|2B z3-0Jn5upaDxRfFlYl9TSnT{uo@r4^tee=QFqS{L1@Wy;NNd_Ck+rEBy+rB}hck>~=byU=9?q9G*TpY zNTU`|?!2G_qGrrH&+_~sh0az0NwHPi5kYw&lL3jhHb;6n-eMG*LEY7L4?>rB-h~Ja zxdUoZ?J)LfhWnRKUN>495wS6g)@JH8pXeJAk8ZFn-}>BFOSFFG#f39P|J4U{XSz|< zIA|DR_ytj2uWa1@`XffvtnnjeI;8i)ZD6Lc@N)0;3hCvBE)-XuP7yS1#R_=Z>h!^; z=KW}P4)>l!6hs9}qHt4>**RRk!n%u0YAoIopLm_JM>A>_5Y@8Lij*UJ@-;S8nT<^r zmUNPUJY;YQMIczjEAogSL?_>%ygWy&B#b^ruLMG@0eMV&?6{%{dmLTyph*Jt_qBCV z4?3et`{e0cO%cjJqpGOi&WnOhWZ;g4G_gUQ$A6u41*oR`70_Zfr>tH&OF0Fbr2XO@z^Cqj+rs;Ye;T!@-XdgD0ebI5?ZHrFCvaPX@ z+rN5@W3q3FT3-E}@hR1*rhf}gIG#KKCa{f0yX2ZFn)(VqyF@m zIQ{*Z`K9CX#pimlQ8_v(=kBedzxeF$&y5a9=-Dl{Iu03AjcMSw;6g6)EGB40USZpZ zLG~=WZIuuVI-tB=p^XQ&gnCfh)f*kkrxhKMvZF~)kIs$;|E(6C3AgW#1<^%#3Yt9C z&Z#IL#bD#is2=h;@qehj(0$F$9B>Oc-hMLwNn`kO@s{z#5;1I2{$%m^Vq=7Odxf6j zyU71_)?4_l=n?$iQ6Kl-d;q?pV~{c#+H7wcS3^pk8>A0g8Rg)s_Mo0SjNGr@bpERZ zZT+~TU$ykibP_Mz*l)*z^djo-mlg=qd85

1!1exG)EK6u~BkmExnKuQI2?mS0J2 za8eyD)-9Q_IsdBnz4POS{BrHRiO0v~>)!e6H?A*Su|PC+^caHZ>q76Qr32=rwfK1D z_AbkY4Vymc@$o}N4py6dp`EbWp;||;X1h}}$TJxf$O&3tY$jYIs7+R=w*Qtqa7SN5<*9N}EpY8#chxqKjiIjb=Hx z+Chva9KBCN|64i0iwkxTbh_E~11*?uol@L?N$7~uF2wzFh(DR^UuO4gXBxEkM%ri) zv{8cCkun;+5waD2M5f(dWpk0UPVmE=cY^K*j_vt{8zBbs^hFzcmNd&3Hy{JW1IEr3 zbL&*n5g6oCi;Yc?L(nL2U7~N1O)%ia!vlbYMwZdFAmW0fdrIN zkg4yE#qURHJ-y+kXZt@&VOHtYM)~KZKnMxaGo<(JSZJ?<(l&l z(MpLRO@LDCtC#n7x-Zwx3Cmq|-x zbLOtb{aRJ!x#TQu{|f zRiDHaui3cY5W@fFrlPg6d9lA7`$9OKaRCOHa?8WwC_%@2xoUGPF9f4n=M+ zZH@g43-shlE5@&W2@%Co!|Awt%m{H9Rj&e^NpoOI7Dl2HC}mAF}Mpw~(cey_SFNnts9e{53t%_oJSeqsI(39-&dxGPX0H zOwo3DLpUGVDkKluH9=>H8(AqnsT3UEPl}-YU}Y{OMRTH85;j*7ZLXy3BvNOSGLz^) zB^BYxzM;IDhui)k_tcgVN-GN0HY$nN1f6}j4+PX&)^O#YHa*vwDe@nGqJuHK99>s` z$cQmIiq7S!yP(NxNbb5=nP_ObV79E=z!IXx2|89mKK63S;EfFDnMY#gku>wj>`0n< zWM(AIJhBK+WoAxp*OK6wV>xZpWCcxkiM_)|wlvaBB~9;Y#HkrcJ&oo89zGlMh}Uwx zAuMgA3=R^zTwbZxGHCf&OMouob-CFX#6?HhE=;1DE7JuwP~^hRT=3dqtTz1(>7P8~ zDDs=4%ma5?b0j(q&Qs<9BI)6r=Hj*IS*@eKAL%* zpBtnhXXIEaGWgO^^bu<7EQWvdPV`Eum{p&awqkV29VTDmBvwUwrEU;O;l9?LoYxAK z1x?bzPhsN)3Hvg>mzq@AxV?73a>BQ^X9garUrvTQjjjSih9kGQh^G5pYw|PQRA^p{ zOMMDt`y98&)%N7-X~k&7`2Jt-=__~sezs>jJW*-`ff=-u}f+(~a#&*|R7-tm5DSL5SUY;NZ{$Hq+ zLE&_YNMN}Uln7QbrI(mA-rHF7H)fD?*i!%D7cOl@{-1yakOcpe{npkX*2SJ|we^%p z#3gL3TSf;p;2J0~iAxaPuz4!rTthK7ok6(7p*kSfAgP7Up$n@Uqr4xqe!64Sug1Ae+lGvv z;GA-9&Seo&y7bdS2cEfgq2XJ9!ma;)Bvw@Vvu?lm(?x@cdZV93%H;FUUNZi44C9$B68if0oW7_d&Q-=Y*Pbl=W3C9lI_Azd zFo_X=@0ODg+5`Td;cY5FJ?coWs}iZ@<=BtP$3e``rWPqz%YSjZpncI`iB%-2g`*}f z7bFEe;7h8G>-ikMT(r_KCZ4!>a3{0rL#z=kF3+wlHgY#(S%n3gY zfqG{}>m#9=RzbvW&?jLvkgP9Q*)Nd8T2Jm#-J}z|B%VZ8t356mq znz@mDN&H9UzGP^{X4f>ia@++$?K+D|yDL;BsfcEtQPj*+AmIa@04e^^68si*w;Cc9 z+a1AqF&^6^wxdn`C_y9dc*OJqO-!y}m~eB+ux@-6w~3-`+bXCu;Gq z?xB;rw^8XcpiBeo>B-2NaN<>_!IPqBeKS2# z6}Ovpw`9i3ma#;(gz65r(Is;2@&O|v#tc33W9;UQ8()gPbfoXJu$&PqSA$&}4+yW= zxc1f5oT?W-?mn~o=RZ~(oVsgc_nA=h$lKr<=PbgBi9uxV7VTxT<2v2Hj4gpbA%yU! 
z%92e-964R>rP_F?tsa7Gy)+eWCe^(<9=Q1)_6K$e>c4epO_@9BvJw!}-gXZm+k`;t zHf*Tvl>?978qz79QN1=+ka9e^?H;+HrTe)|!$l|;Wcdm--n-xTjb6>S&b#P2@2d+I z?HDm8XF%@QL640!E~}(i-=pHUrF&jm)$GNcy9(VSr%lOiK4SXxQKhfivt@PEMrqC# z;EogreOl#5DAS+MmZFE!iT~>^oDr%EXXo(jaAZr^l2vFML6qS@mF>idJXfgqW0j+J%IvQZy0|>(aT!4xAC<& zIju?x8(h>}Kky;=BiMkMaexZq;P8T(E7+hu3ev+L>5~0OmyZ31uF9MCT*r263!j?4 zZrIR=y7ztmKCxH7w^@t(?{Ct7a(+&ue#vzf=S-eDxJ8$at)GRAMy`#%3w0D!foJ9c z^T}5eQ|3UR=x$dOt|ZD7XmzUxMp^>fUyy-NUfCViM3vp?4W~{WYK(S@f|15}QN5ao z>#=WNYdzU#h-oOM!3`3i_6rzKWz;vO*lt$i{i6n@40@8c_Nr%(&Z$!6);HS5igT0_ zc=T3IvDG(vOd?zLjZA%yXWAX&ePd45*8L_ww`lg*<(?s){`YL_->ci5l|_MgruT*P zIk^)TOd5Dkn^!yaON}G#r$1)0%uxkVfNIdq<7AEi*qQQBvNES;WN8r4d4z3jE{)w` zuFUOmfwv?@D+V61OA=CTb(xfq(lEH@w3_i&a)+nK&09Ko=IXDG&T2d~Zg95_{rfqp z^fZ3#y&!Mg^7V6DefX`zSkg4}_I^(c?%XAt)}jTbu^4ONM)z5^W!bl&hOACB8Q#vo zXuO(}3~vky3+8m zk~FX6qP0iCGxbYJL=vv83|fEk-sbHdE_`}s;Y014-@9nyq~{(QvNmV%`aubO?(OhM zulw%rp|@+^C%NKdZ3pM)4{kd!C8c-sf#dUx+go;ipjF%ZI*QFrI(BHuilN0JRWX68Q1fToe5niR2eN0(D&M9Wt7_JJWyEQ3)%$qV7M2Q>9- z7#O&>aziRU_NIm~7<9Bd0w0oL*39;p5xu=l;1RP*V~qs6F9QcFOA0Y_FE+%6q`m{Z zEt9GvUrT0YLFiRabI64<<)7pCm-`IaH@ain`=)O{czovN=Z;){{IS+K4-fcmTgSGd zoA0TIhizCga_11^hxayaoRGI+?UR$%IX>+-KeqR(-tmp@8?bE7LxufTJ=x>A+irVg zM3;I?dnLYjIKH%M^0fPV4ZnM2lYRQE0R{7iL=RrL;0c`3k?8+gA2cTJJ;GZ>p3y3S zTxxdsDcZ6spdOUFk~yYjLup~u!rdFee+AjG$WXA^=yXg$8$Ba~9Ak+|xI`l^OsV<{brh9+`t{ti{&{pLrvz-uMAS#`|trzih&$)YMIROE!tS)@{Pg)h+2_>zCGx z#=djD3&MZm{FiTae(&sc%xo`ow&SjC;JvAcQdE-_bk$^*3+bmkWYroqC~|K3Kaypd zEpB+WZEn;xQR_}hQVYKQ5L&0HiY21MZBl}H>v5o9t-&Pm%MX9gopjxG&y#IldC?dp z2JJqyZJ$w~FZ7LaZ98#f{LZ>fjNhJHe0Or2*T&9YHe~67`Kxdu_hTkkUDWrvpFEL# z2-VP;i>e1pSXs_<^5eL5KC3YLvSIXjGx3Mr)9(%Aj?lD&NpptoOfFmI$J#bp~sQ7CAoRb~_V zY_N9fl7EGAp*+T5BZp`-X;KA!^BP}${m&x(ZC_JRqo=;nPK_4GGhGG`v?Bm!lU!UjpIM8@Xx~rFqN0(onqtEC+w>!LDcE5kP`YcJ(iDc}E6Zl)}qGn@a*TlwFny#AYf*Ya{* zFPxacsjd#9o#PB>NA<$hv?<;c$q7@46XI@gt7A~PH1sATd;*rBJV(+=WIB-SZX$0; z6!Q)cIXWOZXbFNMxJ+|WU1Zl9q0%@dE2&j|xft{QCxiM8oB8=0#!1oOnWYP+7<)z7 
zoac>m#-GlDHA{NTtdg)IXXVR!hS4JL@xgg||G|S#xM+_gVUIMyJUy_4Ogo5oYFYc0 zPc(;qh=EvzY4B+vEZ;J;b7=6rFWxZDdcu*Z?vRBa)g6wuzIP{27GI1NpG@-QiwnPr zh6Y@W2+Xe12hi?V=G-OPxo<%gPnE!}qu|WEHHneH2@!Pv=#7qd>3I#^x7_h)_c>2B zg(pMT<~skSpLXZr+(pyvsC4zBt=AfBT;$o88Hz|x#MNf(j6jt_B!vZAMpBH>jpn(s z#~K-%*yFXblG60kYo4C5X7C{IW5&D@^B!=OWRDq>-S*y&t&YBrUoL&kxzjl5o&=vj zMQs@eagi?*0->5iRNxS);+l3n`hc5?ZFv zW#1!H=&tXg6uJrgWJ*}yR#CxkJ)_Arkd)afJSvG|yDgJCC%O94W~~VIup(btOFfTn zqt@%_Q$<>TA{}4{6h-4?-!`e4U27y{G;H0c?d@Y~#Ah^`KmOGot^w_O$J`a3RVS)# zpZmia)e)gzg35iIrya-K17IPAY5jSwE(9Xsxu)0|W^5{{a>hD-UC&JRI^kWyp>X5T z4KD#`fr2{RKFN!ML3=1QuBE552ZM$!6u4nKVm;gTjcSVb#Ji!&-)Q4s+4gpz+kAVP)y11^f2R7ZKx({eyPSb{^ld z<@XL3L7@@O3yz)c?HEry`Zr=9V)Op@<8ia}@yLrA5Ppk5oPl8@L_1HU8g>%;DqC<=Iu5r&SabDQF_xEw5TvgIL-QT56epG!q zQc#ZWuB?o{l0+Z}_h>9H#Pq966>5oO@hGlmwZCG<+$=M%0)I2rw{u+tZGh20Ud`Eum z8kx=uue|cZ2hDoKHHv7M5pj3F9+{NON!)x2ZUpn7>QLQEOd3&8ESvcr#k^pi*b*c$ z6!2eC<{a*PDm)zSI&3_Oh8d`m{vYxiCWu^2@SBKg5t$J!BRWT9N8H43GO*bL{N{-f zn>z=w8~WXI`XTpKXgHNnIY)dh#70A2D3jH|5ic1YJE6T172nO=a+&k}?cEKfbW{BW+)%m&_hqEo z;n?RvaC}24;D!U=3k+AC;i4rz7QcYwj_CaXv&^*X-0i|57-Sv7T7l(99l?sO)} znjhRW6(37XzM=DaCF9%kTZ~JqUKH_L&WqGalPZ7go%hAWNoVJ6KUq~AIrrC2(eb@z z?~9JRO22t+|BJ@A+naASzAf~=p+@fKJN2@?ktn`X@jx~K)yP#2jyKlDUS0Cz+teC3 z%xr3nw^k%)s^H?ipvGRcP#Om*Rg=3xl{*zKzW0Z=nmF&JJ;tZS;sp`E^%6>SCRP1n z``j-l&iH-VQyvhrjRK^FQ)1GrD)~#J-ocGq)X`Efxp@GOBg7ZtZ z)HSvglAt3;$-XdOtmgMW(UG08$T8HN9#+<3Q9h@qXY3a*91!)lip|EmZyM)C z{2G{gbFZ&-m6R9<-)a1=@do;KwEw&M^CIHqmRm&h5*?d~_KYhFdnN{Pt9!jpj$6T@ z0u4d#9d-(<5n4P9t?leVW8|v@8>f7sT7br}X#Ap+l}(p8@rH>-3*_a-rI$7umkPy7 zk+^Y_NLVQ*PW)=ltS@na@mJ5x{(Q2oA9&B$xo@wy|D7f$M5ldwj9u?Kw~C5|D~*er zn{73|Tm6Da+QM@PZE=^mGd zuJy6a1A2{Z9;tWHG+usQBs^@Kc=*9h#^pj$DB@`@t0qqVa?Z@pr|1*Uis&yU3H`vA z#w$I(>2xD`kLd7D{r5!Iefx}U?}-vowQz;;-OEk48W-0r7s;ps=gcmhGP{C3#7bvY z+XQiN-r>Ow);Ob^JFCsk!)<8qRC8QkJK{KDeHwc))wvoqsKK4)%1sxyr+iwS=H!K& zpj%Ls=mN2CJuFz&5z_3gF4PaoX7*ZD)cUf1uYbJW7vnM8#o9}LGEjh zE}fq`R&=J-9%QU`lse}lFX1t7Mf2XpC`mzORNB=LuS>8i(!+m2J@v-Qe?gnFXBr^L 
zV>>k7stjZ`v4l9MUQ{?U7A~orHAO9ccrw7@!Sc)1q$b zT>883#mV}>(oY?moGotTyGG-_BHRk*Jf*+s-l9byE2s}^HDR{a*wT7~E(`~E%qiTy zNLEr}rne!e(!x%us<6tm0)szL9^pnn2h$GK+P7)J-aS`E z6A{^^$Mg&H7GIuioH4E#zkX;Oex+sq#=3Ln$l>39_v5aO7jRPi1*j9MtIdXIB0W$~ zCeJ%dZMx&h#A5nv9`##s>z&Th^H|83Xvi41-r-kz9*)ncj?Arhu0hX@p`kVc z<=y(A*XemOYPxjSKAi80H|Y5>tc-&{?{?tT46&xipy|&WAM_E4Jr_Rplo-$ z@5wLS{X~<&D>nK<_1``}-D`lcGi1YpSKe$u`~OL-!emf620Y$sR6syq))-|q;$7`kn z4U<9z7TB{-zeS%Yx}cW|EuuHP+@{YrQ;AQVhXNnO7-D2IT8}Kgb0&>AGUeNG6>{>% ze0}7n{9P-?uD+$-x+h+EO^+=y=DhNPTl1~jZhU0;-6yV$9$_R88uAB8$^O`{pChVP z1)W`@ZQZMRi>kpZy*G%RIk=6AH%@j_39j_PJ_N7IiiAJ!rd|)=U3%k5FNKbEjoCl^ zQ^OedX@U1*$k>pLQ#b70-(}vI$JX@`x0F_2dHtD>woG05+xyi|TmrqbG0rbQ?|1K z$!w#G;|tKZA~cby-ZW`rqy^+?(7;*0Q?pC>Uyc(J<`b<@;0m~bX!dp{(>)e=#LEoS3`g)t1^H#4~Kkm$wcShvzdGSo& z?q9Fiqi5U@^>(fO`_Budx82ga)BfF$t|_=t^zPN8;@T%yJ|I5P3+JOY<{Se~c#o|L zP1%Ng1k^ND_E1^F&}NfEPvoX9bU&&rkW0_NfSKDt?PhM#Yx2M5%{@CspZxXQd0$P@ z^ZLCstmstsOGArJ>GhW1D426?*{v)7p8L$TW#JRwA6t4XX~M~|{-bo3jsEDs`T+J8 z!s(`ahgy-#}#>)O5^}vgz)`ypo*N43$-mrgilGwCh=6z6kDSVeihuY+P z>QE>Cdg$!Y8A&A2kJLOzMFrr{<$p#p`xbI+iz_{|Zv+!J>jD zBiASi+Y9vo_G3fqlRfD#|e*t7i8{`HE6uG+0vFD{+GdFhRGJ$<>z zyyiOg`#wE(z|c~27dbx0$|Qn64f2LcE(;I5%8m|N45ep!lesVlF)q8hiNsfX6)dg< zQ8geopB3L8t@;NcMd0=>^#xUy?mdpx7>?8bk#+iO%F|!-|Jmse%4m+G6b4!zzG1?4 z`U4D(R%-b9gI|x0$ob)9my+f+XFa#Fc1vu7xeO|c8dPI-Q1f&`h!|g-nCX^|9HM0+*_IdqGh$Q32$}zVO{wld`-5r7+zgU z#k20!fOo-z3t_V2JxSp68Mt)W!}%`Ab4gM?{KfB^Pj$)Z{dk+Ar>3r2*H!fFF>30Y z1rt_Z-1n66Lhp913hFfJ+Vk<=58U53wrEzj*&SLxKB?8{`SXX&%4=q)< zaQe` zSEgRy_d8pUlVc6l+n>9q%sLiUSnxh0oGXzUo$H8f+$6lY?J!f)(3vHvfPwtjex}lFA(tja}KYr}!`@7fETaP#$i~pdW zja{DJv+$3n(L_3Xvajo*Tt|42fc^?8QvdAuU>9+;6B-L z@R;h{3&EyKsA=3>N~UD5O7|i5W@}1@b|Dn0_C|)8p%wFL|4~+Vl}E`HNJ$c6a^lgZ`uMS(Zo^Y$W;)E zkGkT_xa^8U=T0PS;rd^1CjQvb*_rJ`^|HM}8~F^u@)sP%t~yZIfUcQ@;y|V8cRIQ7 z)S1TZ)2KF8LyL;)pX}Tzmc1$}7Zn=c?5%E`5K)&zqVeI z)ilSCu!$;Z%g0 z_t>f>kRJBBlnUxF^R|M=9(A&gMZZq^>n|hH+24rx)zP~2fTMMeC>lD{7=Tek`Y-Ea zT%B|yp5D1x#oq8_a5Qb=D7dxE>+-7J;i_Oo}!fb-WT<;ZWGz&sv#GT1lp 
z7jwZOE=2?%!k{Vd7B`Q>Ndi-WYLoGwc;xG^MP=i{-5sm8abgZ8dXXDbN z#-BMxVXq!xkH*ZMB}U|k&__iAX=PPG<8M4o;ln~SSa`IyB2hF|5pb5}vE!>6LYvn*kn-q}HZVAm405nDzfpPWW z5!RC<^lu1Y;X^|sbeUKX%+o8{pmVhc0?<|_s#`+)4M5-Gp%F^lh5mj4XbX3U2~EjP ztvxhCK+^~b+N9xC53Mwn6M3{^ks4TOD-S`@@JaX$E_>PoI!n$+v={9a#aSn#`BXFK zQx-Z`{8|olY7lgD3wI$_^Jd&Nf}qnZ=wi($9#kW-(juw{L06E_ygtB<*{{WX*71pJ zY|f`e;8a)~F3!YK7CKj43_x3<==dOLI&l(rp>{0*ZQ)J~f~GSBXdWS;F~Vf*@vWW) zc;B2ym@C$Z25O!vi=`yo#4-?mxT9{ua*v!3{H&USwl0^i`w!5$Vp=)Sx7wjy*DTzH z+JbVRZwrDxV?h_gFCTi-Na_Vazb&D8_P~wV%lX`8VOb|4(E;A%Al7`c&XhEQB(dN- zD?ECJcEkTR-uE-wI!-tr!29+XGjWja%t7D>b9~J7y3eE+&Y&8l<-Sn6s4R4@Kvd)= zXthHbn&~BR7vcuyo1tr(6p+wF0YEba01f7M4&%DRbrtUma?=i;1{D#Bahz~{ ziT7Q3EXCR!%c8&bV#s2Hdm~faa9|Zp{7#?3Lp@pPuG?U?!>ATYkD&^Eb;v z=Zc8|Xe;)hI15Abd?fBdQ1)ir33l8(LO}Bf0gVx+8XX)ZJi@*_!d%fJfW=ag)Ne90 zV*xZgLBl!`4%RB%7chINgN)LS2IPSyvy~3Q=hO8Ppe4T>w4?Z~bj)$s0bZgHC zTTj1BEH;L?N=mypQgGO`fyQHw58#Eo4cvc?H>pWe8Ub2mDZiV=>SzYHz`<~fnj3d# zP}T!$X7qqtMp)TJUKj-7LT?@n}?Gl@hDC&$93JpvsTOu=)~5z6d#os ziPkhOkG2*XB-0uryM-tKi*q-)PHS+T%Gw-njIF+&Po&~D>?u+wl9{ABk&4@RD?%-SlK$k__3v+OBrNH6i9UdQE>x-|O2t#5z5#rrhN!8S49z_)jmRfFn5>s1bg~Tl03lbsJHT+NO8OQiX{B z)Byjfk!Uhn%hhyiD7%_&{kLJA1O@_&W#>f=WL(*%i2`(6C$yko5ByDLryH2r9#0Sh z=FDB!gH|x;2Yl|@!JMK7pPrN?IwJH9YE6eJxys^Nz z@*2Mg-nhVkW7q)C~l~jTs#IozH zB1~m_vsUUM8=<2~(>vj1z-Zz!OOoYi{710K2YadjM%v27{yqVIWm(o96M6-po4HN_ zTI$@z{yY3W#oa8*E9kEQ-GOQId*xQ8H zs1w!*Zx7L~KL{!gbQ2GizqwYc8=$`BS5O{-)3|@-Gw?^{SGY}wry5w6Ko`|qhw+|p zd*I1N%k>_hvC4E_{7w9y20@d>p?1P< zV~O)3&oa~8bF=*&FqW0zZs%C)cQ)V0_}k!p3HF)8lML*)Gxl7qzDQQsEiad86=|yh z^Q(uvQ;-rEORiQ^%rSSaX=g!NG<7<71i;9|mz`qt9tq4IP3{)6yF ztI@9&Nxp6LxhL@btLzDpexO`EF5s=O{6I?Y=QGFik(OYtn2dR0&q1eG9u1-es?9NH zU}Rc+#&ce2G&OB;e3#$1vS@MDbDM+*eT&dEr-fP#i`yz~fJfjWZYxJn1dD;WEk>YD zJ?t}Rd%A)F)qA+QDvXwusQfkTt#bbg=qz5}QQCFr1CozaHMtsiJZwYKWTYiIN}C2t zQqEiHn9}Q2*}}6%gn%DNJW4y0c!(2AJZrRt*65XfCh@eh@N5u|!5>zE`C96k61rar zJ4<5nTbMRzmq1lOfezKrl%z>0a2@A9UfsLo-bH0bQ*+!(*8@fMb-bq3V3_td`HnDd 
z=+8>V?Zley<23~yUb`G#yM9JXeKGr2@Lk^D-x_~lbnIdX-lczprf89-#kl_CHb*IqB~u0UseNDWsAqSvCmT{z{%vcN3aMXYv|L zrKxTHy2MTXM~S;oJNTb)vuq~Z(EA77j9HDNhjE`1#eWhrc;N)$^_PM=gN*i$l}t6@ z;P(&vZ{U3o+~sV8$623joMZSpc6hw6W_(TXOcly~l_M{Lx1M(#^jIacnHEf2Io_f% zN;grXtdBd75#a%rM&{ZMGcE#ql5vNApNo8IJ>+|B7r(D>bh5)^R;!uACmEgeNdb6m zf_9nTa|Fp#N8+dRt?d&{)M{AgTgjCqM0%FE+k;=3zbXc}mSHr~2TT6y#=NG*+SPzg zX8B;1hA;gp&MnQfZ_IOB)qu|wJj|VIj zbxPzIqpdNnfqljv58w$N8F@S;o;BKY(&GU1h-~iM|8x-L= zx=H_$nAV99_(%Z-Ox4aH5Xw+;OteQNt_^65*wn;@2n@vwO3m1nxDi3?xpvS0;mXTaGiHBb&&v54D*AXviOw z^F?>ubKXzD=hDzqqb7B-jZaL)t$5CLPfgCbSLykK9{A3<#Wxv&b?}g-5B9Z6-nf7Iv|ycp?jBW$G#8hs8BF z7fV&y4K;u6^b7MGTqX8 zmA@AG9 zX!2ppOQKE&V|L$=nAy7t%$Q9t{NfmvVeo1)?_U#`jrEI4Ljjj_Y-Zly&xFP})o)tf zS_RFx-K0%O+-t>@0B+0TP*R9-yU6~LxYvq`<=|G(a@-^XCGH|^e?SIWxRqWbabGcU zLk9MA+%#$h8v~(HjpK#WWad(At!NPx`kw zZ+NES*CC6E#=2WfRlm%a{vdb;=w^g+b5*XNj35=P&|4GU72+F@JRNmYn?(6iJoDhrbtqz&ikp?TB%x_O5_h4tGyrYkR@Ny)^Qr)vM+j()FafJl zfk$|nN0=)*2C#4-iAJd6?R;KbN3idRE5Nfo%A$#Nx`Ma^Y2YRYSp(ZK)#M<2ieUjJ z!Q0#lIhX%7#zg}l4U1zDN)`Ek6XSB|1H~ut0d=JRLwT|{(_XY?qLxenjbz4MB9q!Q z`J(j!peuLqBDxX>Vd*^+bPe8-5{6E^gelaX576G)V`~30v6h*4A#ViSt}m5_L8!X^Nmd04+|McFzovnXnZS2?!22v zbHNZJ3XeOaj;UHwy4kOVTLc?(2ZgaAD_}l4g`YD*$at8s#m1 z571FOQ;B_T2q+IK<=xP0mlHyH4{Scln z<))BT(L69;wuoq3Dz3cTXy|OtzxRzvwMxcM_{Bxg9CDe@ehn{=XJT! znCdVvI>0jJD;mF;d&YXt-qYTWuax&x?E|)J>CD+6aps2kV#xbxdPj)|wByl8S)41j z1mI{$;fxrQJyxNoy-5>L_%7OE)>9@`yJ8X8A^|*>%TK1S>BG2#(UbS3t51^#}syu)%7>!YB zM|1`Z&nueg3_AherFqeJ!664IxmDS;8}7#)v!@yR=O*?(61$3|Vznd;$!CIrKKHn_ z;+Aygs4KxDdT^T08em!9?C&=$95fP%qbQ$8f?rXwqq@eOHtn4}>WRkp_$F)hJO^m? 
zG>0KKuZ-H^b&Ut?IN;gph&p^zd*QPyHH>1&Crc7o`iQc(q=rH3E1^dr9?0u!^Cn3c zT3-pX23|3qk&ojn@&!*D#{2DAze%rF8TLmuE{nPk!fEAq08K|G!;iy>p^W2p$&UGYUu81tS zZZI$UnD<>F>j377v$(19SxDWEb|C3?*nz#R9cZ0#wFelM^+UjtMbVqI!w%phpYT5a zkbf^%bOLS7-?MnV;u8$bTvbA^6?MviR@$z_O?#W5u|tPiJJiCgct=&7$Zy>BVW-#P z(QT!k4E`2)+uBXJqPOs=*=pp`zV!pM^B49IBA$xVwAGk_&WCMbW4C6-GrYesd2k_nG*>LYZ$41s8P1;F3c!DY-|scy?_v0L{%d+`_v66- z7E^1RKg@p#nha~NmiHAq7ydESETl_L#dCun+D&uAXm$hpcz#Z|o{^D(&oN7U-V2|P z;b+X>g#U$~F81XSF| zRNN}Y!S7-2IrgQJZ&GQzk0a*sy&K*Yj^kpjMIHZX11KTTu-;Q^{;~pGqVkQEEXLY9PjelzYEN6ZCfpCG3R6j$>PhkGLsH2Fk8Q<@9O(d^|s6KRCAYGxh*@S}WsWXM5*8T2)l% z#haaxh>cl4NDr)CC~q#iw3G?OxjJ}2`z>zofu;$z8;Vep%x_UuWGf>=LE)WANpWc1 z6qiIrARhMJIz*aqWTdB|8RGKiAF91v)PK^L^Lbk1Hq8>^D%O2)c>5A#+f=dit?G^X zwsqBU-t%%dW4+$lmuV#5o97-}VaRGx;LCS}i#_*@c0X1Dd*)qGJ{^0APZC+H{?`6W zpa@f$--F7;rTWrarW>y+=mrgKt7w5$m9k!0J@T8hmb5+u6YoDrFpv_8U(SKYFv|5C zVBVK7dSCy36GlbN=yb~6EP2i_s*}JN_TYU6`zE*ZcXNfU^1+Jyv4)40;|LbA1v1dB zBh+?BJ&QLLM5uziqbhK)dmW)wxXh0KLd)0fwnhuoQo+{1&Re4jry5l$TT@v-y=Dd{ zFj8ZyG<9=@lS)IWA0oH2gm7~Fy4h$p{^Mg;k9g(O6Gx^k_bvr3%5b26ggna4xJ_E;<;7s}pGDD=3d(L-39FICtbML$A@kl+i3$ zwn|6&PrQ~_kw=78#E5vc`C0N_*eA|k`8UqU-@rkz>V%lv(BdhXif`Y}bLI^Gyeua!!+B*j0rL}kah2kH5bj{Nzj5A<*|%lVXef zKb!kq&7mw$qV5zr<9LX95a(SYllOn4V@n9}7jOK3=-5JScw|{$n(V7V9b0aF@+A*< zZ29=FQ6J1V8@8-{_0=u~qo`p^?He^$Uzx;9 z%(UU`aL|VRV^8t>b%1N(x&#_smU~dU2$^n5Bg^NnEH{oiGj+Q2x0m2MlyT&ytRV0; z?4R-~4g4>l`%?pkdqDlFGlAuLWqzf}Yk(d5V@| zb!YHAx|q=TWi<~LF0@|CQ&iP>&iM`ajnt|=+D6ewb6`FLjprOkv5UFN0ps5Y-HP5b zH)I(HZpc*nclC$Z>)@4Ct$0R4i!p&{oy9AaXPM6*j~wgAxc^;clH^=;DkbM?tDOp* zN=xfgxFp%tM8a0V9W z+}AaX-}f@QItPLqq-$T&{?WULBL7anKWcPytaWz6`(MoW4g9O}$n6~p&4 zy4tJ2@V+a*=d;1|z^TIbZ-rL;7Sm%tKbMi;6bp1;WvTZC&v%XZEFksRuAx1cVxA>n zcWW$Fem0?T&#T&lmKCI+%l;NiXb-GHvmWPvt9k}=l6Z(;z^Wp?VAkl$%A1=j?_vN{ zjcGESX~5BA=H#>ZS?>Kr;FEiQ4QK3|oY0CrD~>Fs3H*y@ZghFM`{)?xN_BsRUDU%I z;dv29Yk9BHo&xs!!KgGoyPV@I&+aHF_*T8cO=nl0#^-@sp2kty z^gy*8+wycC)M;egT$u~pJiej~N{RNVx0yM&0 zdg8en(G$6@^vs`o1n7>>`S%k=GX>{5jrV{fN=%Bx+X8(19S>zU!j5HV4Yibh-2Zgh 
zahg{J{sV#@Nw`@DWx7cnbd>h4%uIt0syMuq)-M5LUz`ub(Asf66(AHBmzpfCEwC^p zK4c2XZ_?hyH|aOIf=xoVlF;Cs8^m4UvvzJor!)BsIwi1Yl(6jOEC%|0z-!xq4gOP% zz|48VOkM$xdy!Y^8*>b$JccKr4;8Y^_)CuAezL&!g2P_&P;cAsLNqNytLLP>O1ttP zf9oZD-i0Nif%h3&ffLe{?zQna*L9Ez5} z>CACwE1ZB|g*x@J^}6-Wlw{d_J)UJ2{zlqy(Ne)%8dBt$k~j%p8FV@7?qlI^gc=~H zg13^kfd5WAen>J&2fZ=4-p%8$VCB-$_}jX-S@;`k3(6UP-7~m2j`xn_swXY{O|ZYK zsqt9!0e;Gy!@iI-Ut-~JqRsbztKjY9uW#a~b>|E&;@TOA(4(^g)|1y?$qwZPuUjIaSgclxJ!EkL#r@b-7+-ugLY{>R0|2PV-Ic zRnJ_n-Nr{w@`EaG?-u8)8uh&GWt?EEmSrt?$`&94&ghZFpAo>{+C%$j_-?Dy9#oG@9<~I-RMfl_a z{j#IG+>3}*o0U@9i}>V-dnjNEOqg47;;zT@@2Ur+-x;#DLpek!cVk}k|^@bt>wu89?EsSZ_niI}`yu%qMcKDLO9W*T`!(=PPMWdtBmgqRsGIyWAQ- z=wosK-T|KG62C?g6`f$PR$FK%+_bW)7N?hi{lU8&zrx>|%`3dciG8BJW&N7+C(M9F zvx$G}1%}SmJ)*gSw(LZeUqX8MYPR@k=aYB$>A;9A+{*7>7I$0GuN>Se&Qlh5TegzQ zdhIey$s%#T3L43AzwC+@y>1$}8i~YBGDD90W#@eC`m(rHd`jX@v2YiQc+pNlTQsR_ zj)ZY@=MIUx7zEVEjJ&iYTg9q_hVBz=Aa`%_D;0CB; z!28DL`yUzp1!&dBVgGX#1jD!I_p*k9v}(X(gu1?l&h_(S%Vt8c8b*LLY$bSG1$ zwz0}l%oMmr6o3a!Mg?90{EK{&*gDp`IMNvY3BVUJ{9h7Yufy;606v4^$<_uQI+>v4 zI>5JJctjWY{WN}G(`e&b=z0s|?r7rq#J>TKnQKOGXE&1D$itB1`OSX-cviwHr#>HU zstJeo>%$7=!`_YqXBzF*WY2BY?A-EbQh1o_dnm{l7ac}TSmHA6oeaIYdWj$!( zEBlhtMe>0LDO3PtyH7#(YvB8HoN4Gf^Zs&b*+{B{QZH(Yx(Qe=_`C+iTlJgBTQV&5oZS*bC9ER1^%EA%s(C- z8TcoI<{z{{6)_l0rD1s^C1oNrz1fj;lJUn5x{$U^qgPy=hW^u@;6A(PZix=NXz_}> zsbU4)f&F%1#F_ewgIU7UFKH6xRM(3;Obn2+*FH9c~9tHJY0@1h(@j|t};5QCOE zG1$}D_@nH(2NTAA#uZ*@rj9@9(ocBi)6k?#VDlC6=iwk>fs+o9%+tzM`ldR z7`^X~kug*8mwohJr-V_%_eSt@x#O?>vB)kkP8cT&MBihFjn@i9O;Ph%(SlUARa^>AhV=VM&Z&U;cd_YXmx4L#Y z;2KOdim^K(3ZE_=~D0u2Z9fi0rTRxg@b zrd}oc$xvLYg9(ErB;5ciZdTwYfXEVp{35oM+r0-2pS%B{RDXFb$`X#7;jB@%R31VbEdSP7h>$K zJ@o(&uMBALigQ4~LvD>7J|(lx8}=~#F_!@ke+_umDmQ;c7FVqS50 z0{D4%)A(IyaKdP(_z~e3#jdY`UwUO;(a-pQSL0`RIvN(d%%1dQ0Y1wd&r1Q1o5c}e z&zqep*q-O{!_wyQ-w_zUMSb`S=r%r3ZU*SH&qMU5nGk&*E{FapXG*Hb@#93`eqN%_ zBNE=RFo55pPqgI$gk6FTv4p>|iGOtfzv|Fpi*3^xInL|b(}d3otgjXOvts84^jzmf z&Ye9q5E?9UbK&X7H^{q7S+D8P+3Ecj=X31AS^-?iZ z;jm>Xl4N#p1D*{g{-$<#xUAIRMvXgrjwm9E_?@w?&}|4QQ{v!fns 
z=Dr@QzG5V&fyuPD|HhEN+YI0KZ>~_cPcx*u|Xr zX$5cZZv$(vBEM&fFnm{YJWug^>DfAqIr2%+?}GQ>F?DjRlYoPT|L-jM|G2h0v8hwU9n@O}VyFsTpA+SW@1?_c3vmt-8b_YwjA>-=7xvJXKUK4qZI zMbVw2EAXsTv3z&cK#>nP^3a6mSggGj@DW%PQT8#;jbpouaP|T;?BnKEZMtP2t9++w zpvMQiyMdYa%9~7&bnI@*FlHS5z7fA~jy>@w!#6ee#07re()h^r2E+F@-?!)Yy?~!` z)VT)$<9VImcL)B>4ByZE-9Gf*KxU)sPS+`%{VW`Fj~ETXPFw+U*V)7c4*}JX0?$_e zyT(`G%5yQ-?_BX3s^^;Vd*dzY4C5~X{FAO&EnQqN-+yMD#ru~*kwLB)ygzTgzrc7( zQQLJNzyB&s;%~t30pz@w-=8+&bLqW?yi4a;S2XZkG2#0gXVf@RN#v$+;u(Gy#~FT> zIra36sz!VcZiUYwyB(j@6` z9#K#hvotAhUwIl?C$3pEcw}p|w>9h6*b1g!Q`xtGzG4drMd_v^zV3)G0cn7{kLcoJz6m=k)zMSZnvmfONq&h`;0!N{1o2pY(@=CS1yZ=`gI3`IL z`ZaNmYDY1uNSt8G4Nlj`vINW3xA1$uU5hxBo-5x|#|psTZ`@|D1cg3Y$GCv^kRz!4 z1HAZ{;oAyy9x3O2u8tlf>OFI22|tV~1@WG##CX8zAyM0E>-ar&8@J~IuNSZ@0Z*N} z?YY3~wGa6HZev7HHpY5HMP)X|prCAw^&*P1F{sl+6L2Oa$=8c`#vFYdblD{S(=D=lZ&44KhPLX;xUjk^xWV2|8nhh(Om4+ z+v*?dmmDdM_Kt~;)sB6R4;@#WVa@^01(s-J(jery z&{mdu(iMPVC{hI&stDjQDo(Q{&%Fh)$@Ruqxrl#O8^` zNuf#iCcTy%p4>J0spL;nQd0(|tV}tRa;4&(6~|XBsZ^oT#7h6Fe0SykRXkN1RXJO= zd)3FPPOo-nwT{&mR6CiPn)+Po|6=aHiLQ zZz4^)AX0)L0wTpq6+}bK5?bfyX)}B~9s!qo`^XlAx zrq?qE>dvm~tk<#LZ}n@|-&o&hP_4n(274R0p6&MR_UD>D7t_$Q;napV8okwAZ`eGr`R?Y97H_vW`9h-?wzqt`W$%`uFLrxz$E(}+&nJJ;+y zp!25AH@ZC2Wqy|wzovfM{h#;$G(Zbj5U@SqVb`f$GrK+0ZC1D7?jGIycVEIcd%fGpCK$QVQ2W%TyVc@*C+PwA0pap}igNFBzO1kVYTC)AtJd_wyPD<-%m&YtL)G2pgzTjsN@&$8c^*Iqtl`MpnG`y}v_$Q314v{~`t ziff-*KlS@`@~1aezO?d-m5HmGubQyx=&GdE^;QpDJ!AE+Yih3$#w149a?v1eV6s0uD`Ou zYeT;cpKnOm*kWUcjh}APHZ9wf`c3_B_H2G?^Nh_`w~X2{dCTc|K$C%_Al7~@%~l& zzuv!j{}20j??1Bt)Pd#)mK`{7u-d_Q4_-VJaA@OU{qVTM7mfrRdF@E-Z*_iK{aez} z0Y}dstISgC*s5dqjt@J&HMm@GgWwLqZv;;Yem{6k@TTA$!6$;R1!o7}KT+;PgA*N2 z^gJ>0#QP^!pZMX#@e^Sul26<_>2t}!pB#R2*2$G8*Pq;b^7zTHlgTIVo$@+W z{Z!*q?N7aaYQ(A8r&gWXe(Kn%E2ol9{eHUa>AI(%Ki%zgztiJS&po~J^p-Pa&jg)0 zb>_;M%n&7{LP*_^Rw3O&-U@j;WM0V1kS!s5LqbDtoPF(V>{-`2^<33+P0w{Y*YDhz zbF(>b$-$L_2>7TKXyL!eA4;c^97+L zLu-XL4eb!xEA;Kq*`bR=SBL%(dN?#9G$r&-m^G|QSi`XPVZFmfhP@j$Cu~jF*0952 z5n=YQhZjm*cGdR=wEhVuy=; 
zFOInw7TzYjclg-w`QcxLZwWsbeknXH{K2IPm!7@U@lxMQ<1PhW+Ii{9rNm1)m$l1P zFE_p1_Hy6LLobiNyzuhc%fDPceEIU_w95}7JR@pFycp3fVpzm`5g$gZiwKH17ZDwi z5h)_eM=pq56ZvE0k;sdY36XhGdQ|nORZ-tZ9gVsal@#TO)}pILKOfyC`nBlM(H}&A z9K9)eNA&UNOVMf3IafTdRJ+pXN{1_bt_->I?v=nRUtQUH<<~30SFT=3zH;ws$*VQ4 zzHqhc)ty&QUcGiTz5 z-ipnP&5P?A*DLPLxKVL);y#R99`{w;H*x#pF2tq8{c*$lM*SNtZgjZu#*IlgX5N^0 zW7&-jH@>@ZCh`99ug1R~KPmpB_%-pH;&;UFk3Sh79)C4HAwDhs zK|+Ou>Isb#IwbT=cr#&G!q|lQ39Az}CTvUCo$y=2xrFG1n+f?hOW&+=^O>8?ZnnGW zck`8-18$DGIr-+Cn=5W^y1C=#zMB_sCf>}vc{kCTSShh);&X{FCU#4FD{)%l^2Du) zM-rnFGm_M#YDq1VdMAxadM{~N(#E8HNf(n6lX8>GBsWNYA-Q96-{jHBA0&U4yfgV! za$IswN{JNTlr|~7QzoP=Nm-k+Gvz?asgz48aVa^sv|H70wYb&u)~H(_-1_|1u3M*X z-MsZjs&{Jr)b^>br%p^=l)653PwKJM(A2A`$*Fm1mb4maEz)|VjYylFwkmCV+Of3I zw5w^!X<2E8t(>iat-b9H+eq8HwmG&Xw$E(e*ml|u*pA!l*;m>xrI$^gl^%b)!R^_% zuV%E&csb*>jDZ;=GR9?0&3Hd!VaBqIH5uzNc4ZvPxRQ~XsbyBrY?0YBb5!OBnV)Cw z$~>KUBQwuY%2C--$I;aBlB27mpJSL~f@79rrDKcZC&wYj8ApU8-jVLOlO?jMWHrqS z$m*LlG;4g;ds!c5t<3s1>rmFEthg*&R&G{7cFF8Y*|oEqWVg)@$nKjxG<$sZd)f1| zmt?QZ{wn*M?CsgVWFN>ro_#j^Qg%%C&1_qCZuUc`r?Z-~w(~jXi_Z4W*PRob)14nU z7dbz1ZguW-?sXn@9kGTJpYM72)&9!G6Y4fAT3FRFS<4N{zOm1`oyt4iNn_kA8- zEm6%=>gy#?EX|*`LSU|T|CC&kk+>w*sjuMu_k(y{pDdnZV@G2M&#wfpt1&j>Z>6M9_ zB)-(^in-cW(cj}WF_kCyYU*}z$kJRavb-z~G5R>92eLm~tg-A6e9Bxz>tjV%OKIX; zi9=Qg$9@6h#XLRB4By9wwqHLc2I%L+3zp7etL_xFJQ|6Y^)%5+4--|eIlrZ3itn`g zq8Xvk`iob!8A4IJi?MolF`7JX&})gI`ZO_7uPxqEw~G$?6t+i;jl2Q>OC{P^t7V$o zJ3T6kuk0uDQM{-9 zBnEhN7v=P3rkCD4{kS-&2Z^t(XQ=xhsFxL@8)*&X+4If%7hWi|Q{kt-FJ{0e#|S%E4v7JlmEr^VXtuskOok`+Xc?jr`5mMW zgq}{(o$YV*FU4~8ZE*m;dS1(-4o-^}W_(w!nZ~)2PrHf@mbqdoeE7a~tyrdiEk4qF z7?IWmN$*pSZX!}o6@$sw2+FfV8%BA*5-(Wm zz{6!|Ukk)G+Rk^Bt&t^G>@fFHfU;Q^ z5)KeU^wEZm{R!3>@r*Thu}SZ0-14X=qAYo0yB;p;>a9gp{Gw8= zRuO9bNNlqFAinYZUPO9~hc{}98Cs$kD(gdE0`Fvr8d{FKexT_<4{y>OMcE3|#_s%~ z-juv1^VKo&;5)E@I-UW35aPZMXkghZj_RM2FY>`Sc^|T8gFceF45aS95r?%u#Wx;( zMP+MCQN=Qlx}O70(#5ZOU$NEtp6FxgDq2{6L|!Zt+dRF*e(K~K&jwE*$(s#$TW#hMOE!%QO=@qp90Ygp6hAtAYQR1QV)H|AK%i@ 
z{ist_Oz}7>I$NiR79JPGG3!R;OO9BoR}nKQgRiB!=mKx7hUN|6g+}TDF-*tm;K|H# zQK%lqc7R<0nJuVCVZIY_tGkH zi&5%0;ivWy&%%RU^be6UUy08&-o9t?#V7AAG05YR7^IFs=8X^|w68>aWX((3P-yy@ z7;dc(zkMPOdkz)*trgg=M!iiI{VelD9W9+URvp@P65}nkp?M1O=&%?h>qU2o3CwqV zX{{wjS>r_@@@O{lZi2psb32P)sPhi`YVo~3RCM&5LBHf-EF*7Y%)X?u@Yn0nhD(Z8 z$cZ25%lN{lc%L@if%<03#gLSSLBkAGUL=5n3DF&;zkfXPFmg1IpQy0iV zLHiSwT~H<=)_`_E#=iwNgK@~}o_Zbel-@=>N12=JU1*;lQ)j-SA@$Y+UR|s2Ab%aj zdctvfl2~ua7wfgH><=KkAYM}9D0e*jd&O)`hVnYAc$RaX()O?~)0Ec|YM+a)`WdcE z5-(YX5$6JS5r{0Qt|ZdMCC$ksDQMyDi0Q@UM*9qVHzEqVTbl zVgD7;#adacw+=VY)h(-uYXN46Vb*Sh?~2}f4dG>dlWRtE?ayM1?bNKXT{grX0aR@au6OItj#pm=q1D? z?R@|ZwGYK|Z62~-_P_8cm_Dc?phMTs;%MNeo4-UT(lbTARL0dv79FixMMcq_@q3gs%G zKUK~f_ho37;~s7CzGw}-H|UMTaOD0leHQo-etVC&d2FMTkj_NPGewVsU$zR}T33`c z`9{(@9$tsX==+7#&n$zG8-O%~)QQy3L|IeMlDgJEKK-Ad|LMKEx#X_@O-0xpQ^iA5yM6=8?KeFuqA%sWP$a?tK5Pg-lvJrZnr5qawlL+?{<`ABR zp7oUr#sk7&>rxS|eS^-0ZY%X9sozQ+6aEiEKiVR7UBa<4MAq&!!ym*>Sr3vve+~69 zMx?0|L$AMu=*Ln=m3lJa(86$$sWYRu{@2jWLzN48$sHaMm1Vuly7*s14|H;=7fT+I zbyuXDOZ`~t=*2^+r_v9+Zk9#rq=Cj26H>;~_Wu>g2PsF2Ly0qPBX748#(~qat(dY^ zww>aTGQ2pXj4ysKDSL_oc~ty9McY!QKLKPQ_uUGPf^hOu3T%-5fG+!kt&FGW8;W4& zPv-Hj`IP-i5oCW+9Fi_YC|-tQ+h$)S%OoNDup-EI^QqC^vcu>JuMV=zXMUoI(E^>Q zwEDL3o!PD!qnYi4dMX;1NuK@dcuew$S+9(}NXu?+OI;lOTk1@v-i>Z;jwxi@mSY#j zOVoWc?sY-x-DaC7H1%%TCeisNkJ4_U%{1kBkiJ3QU+UJ6@+Z&ZeA>oQGhehvQ&!3G zK^EIm&dL1AaiNqGvaL&2Aj5ikEWEaGi* z{LHvo+sL?u_AT`+nJ07nSR|(k^IDV-bL_%+qfmz~;n_{RRiYK++*9Na{l7?#nfG8^ zBk3W>8ghIfxBn{tirP?-Tr>4k#$$3!R&?E?w4s@#g`}Ms%JH?7C53wZqmXfm97{;L zOFH~5WPGE`F$m)ts~mgCF?CUS|2eGjUylDxuK(?P$s_+kZc~0gj(Zi+;jbaIJ464o zj_@#b{psK2XAv#jVa0zvUNrvwZ{c6%X5o0bXx#j7VG(crEnE$a;cM!nfgE2-`pY&W z`PW)BhM`~5ZN@_r<3&S`2}?60PgjlnB7TzXLbgM*tpuIZl>k#>y|v~+Y0+I^S;=Jq|A|aA{iF7;i5js5>&9r zl>5&Z%Pm!m&1xlMm;S5qKp$qbV{BI6JcbS`<-I9GP5F*Kk1UsZ{v)~Fk!>lH3&#p( z-@~@qPe>gf`7Or*GQOxUkox(f^ra3f?*YtoxsTajNgZFNE&C#wKI0Hm@0Q199QQEw zae1xO_Zc%VcHLt|XEd+pzOpYUf|-6*y_Gdfqx0U zt1ZV+kAIBPHGQ(uNLb*i^_CWlJ!`o`EmFLVokvKxe@RTG_ZB|#SvwyY)3sNxuA-w5 
z#zU*%VTQLo)~g&)Hp^o?Hz&VXxCQIZjus;ZjB!i!xrO}D5T?<0HkQ~C6y zg;$;PvskQFKKWrWcg#Ic)+%rQW3o4~uHf(>TaI zb=||mL-!<29Dn3bo~d#NPt~efiRK!9GDn)sF+b8L8}gW$BXY{0)Xd9?^Wat<){-Up zSe6F~v$k42Jw3>?%&bm6__5O6C(1Pup4{HkL;eAmds=jE0JzeF>)iKd!%Rb$aaQPM zwUqEE!9`X{AekjL3m5Y;QsIu~CMU{^WTrJs2}=nL!m6sG2u-tkKnth=Q7oEnRZCC{ z)Po0C$fRYV^b)c-@=j#7q^uI?q{)y|JVXgDaaVzPL`D~FKyOHGQc0IYV$aNkrzD4& zUs)*1%&walgvCrU*5@mB^S#JdGV{v}r zc^aHnoOYa0bc*{7s7`jtHO)X0?eWj#O>Xcw=FyfsLS{tX*y`StEaIjG$+<6=g=LEx zdz`Z5K(kC-XI2~}BTg1qo(9Eax?EY5n3N%;W7Yr3>_`VM`Menh{dU(=KIe2Zf7wp6I8)a+36qnbH2^J@N4>s6l; zKBauh`8?%Q)2FsieV=ANEqvPgwDa-vnc=g*XPeJXpPzj8_#E*$=4fXN`2Vp=o6(LR9NbA#Y#PzQY(~Nq0|p3 zHMMWtHBy!9#wq2Zagr`G7#slyz&@}SFt?)oWK=Rd3`@bif=ne_602ZLfuB<6;q`~t z9)>>L_wdJuKRn#}aMMF^yJAxBu`A9kUTDV#H}6418;qs+&kGfnI{{Q zuv{f&CE1eFlD~kZl!oiZi zTYpneE9XppmzjI~OJ$Q1$@kwW&5J>{gJM5rhQcWKpMQ+i_;Ch)T*{B`bCeS>6w|EI zwyyRCBf(0*lbGspa0#Ti&wX;OMm@Rj-cwI;KKQ@<)CFpwx={U4U8F7+3)K(QRqASW zjry7Tx%!2`ZmE8$ex-h`u2t8G#o{A%y}Ci&sBTifQ8(kM{IR-I-KGAd{;d9@?iNeM zGBrruqwZCIRrg^jox{7?R;Y8ur|K;=RZUZEs$ESNE7jX-hMK85)GReyb*e6&U0ki^ zsJUvMdPmJy@2bD^X06ZEd+HzRef3ZEf%;H^}I_MJGReXng7A=)-1+&OVxgo-fUlYUYAL4=D- zTB`Q5h|+$+>b4uHwMW}4u86B5M%$;QY5T=B?SQzh9n=nqSnV+HS-BzNY2m+$1fH}! 
zsvXmgYr)zH?WDLV617v>X^|l^d6#mQb_QN^3YQilazw7k6L++;B40bFofmhtQ1Lr5 z!lqqNRP7?7GaNa2S&LA3?W$r$cG$HjzJKGXl+YJy>Dq0jvQkBVZllrLoe4FKai`Khzf?wVLB3`+}COIhB^YC$W{%T3@Vxq`9;leU?63%jL5`lwL}2h4-f-?fWaQBctaiZzyjn19Sx`hRP~c0Ntt# zRIs}0bM@+a4P}rrSQ(-WMXD`RhUri1%azH>6lJ0Ep}s=@MEP2OPhr+JNHpphRHFBQ zJ&Lk&;338MD9G<=z79KVXyYKIk&lo6IKLoec;g_oQIPVSZ{r}XkxxL7RyUyUfI0(x zmisL4K6<%NfX_Q4MhEG2&5#3QmJe*|6D0Z#7{|K*0N(MC4`ds56Pj?Fa1_X7i9wY`XUoO+@H=vGhP~h_A)t6I(qWu$3 z?jKUbUn4pekAMRHhm^qHBt=lix4Mj|<6Fm<91iqr98{uF&wc~^$)Yb=EZHdNIse8% zr5Xh_Bq-fzPkm*n&+>i)4tEr~7Q@KH{wbdm0MA>W47_r27$Hx?vC=6nmA@YC~mj8HM%1GHshZTeH0IY{;w7CJpp& z{xq8h)%jz4w>G!5_lEa;(cID(jef1Ixn=3v)_uuA>zY}E+t!g+cs#n+vrp$2>Q@8c?gN(WBV{cG-W%}OU<&_zG1LbN;d1dC_ z4%LVoCRYpOs%bS2^pdN|aEmrbrIcjcB-u4>if zpYsKRAV%rvc0u~pZBmKR17H)fpj^b3|b_r3MZ%?`J zRZ3J;7|EOWEsYmI?ZWLcVzAP!aJ#JNuWT#a=3R=)y~6Esq9miNE@HBn!q>y6;fFpJ z>dZg`YmRUJ^DI6ByCKV1-YqbOz3xzKlxV_MCt@ZN_I-5KbaQWvn9iBgDCZcSTx!B` zo-zV6fRA|P(b;`DK32>mjS-yvhI!3&j!xzd^8QV@dkfxe@RGSFbJOaHv>HE=R`Gj` zBL_aD>cc&!5soJ9N#m@9TSjM8DvwX#REl&_B zVjGJhenjWj&?b0$11;${*1;@_ILV@j(<}-)g47=`uqaYPU>(8J5{igsQP2TyvQB1E zL>i02*x@$oOz|h{hb#&s2}LLh!y?x7qA|k$$p$|dTrJ( zDlf8rNomKrlj6s^tI~~iPvuqCeUv_|`}2-6MfSX`M=B#(k5)#reuu9UE4&F|BI|dR zcUezSrm#MOmai$lDJO-hL@736GL#I~c}gDZd?lat1Eqjx-Z+6ZRxZ{ast4;5YAM#` zc)CkbE2&jjS5qm4+JJhXKTyfJ+E{JOx|!O7^)Pi5>p5tS}roIVdg|-?#!D}k74d{%*&IzKEpep74N(B&O*6UF{)Eor?98QQzQLUzrCFz z0wVn8KlP4(4gVT`NBylb42baWR@GYdivP-h!2xSKrF2;uaG+Wbzv2GlJFl*`+P{7E zv;OV<+xxBXAMYPn<9xuJr=RQGk+q-y_<)F~mv;{HQ%O^;S-Mt9pVB_rzN-V~_IB(vA***_O`Y3d$A;bFa@nrJFjm&t#ikYJz8&U`=0;!_ImsF9UF9P;6J{@j1C2I zt78MowSt__>DYrDhjm`xDW!8qzoT;TuOYYHWpu}P3hOvS(xX#Y*V3ITbsF8(+UZ0< zcEFvk)_^;N=F(|zz#T~^_u{{j^ouQ>pr3nj=h&Tk(b-*=`VB9>bYAUVB>jq(fH?tk zNO5=(Iz_k_Su>qi{}rU){_&m6I0x9U z_>Rk;VPjR%-2B_>-}6#L)#8^FFWuvKzODBE{Bu*@ec9tw;fvz*%l%)HmG7YY`;T|~ z{MVoRO7|U$-}f&UdOhAZzBH~q-utJmCwZ-St^d>6e~&Y4aP4il+T>%yZe0KSIsX=8 zoS?41D>`ZfL(S`~V~wrGLB7Q2Ft)(Q<&9m&StEsdFXKLwJJH%CEsVy%DI}j?EIhX5 
zpQ&4PkH3dT2H%J=e}>~9j&7noe{O6tzBTq3tBk|OAw3m~IMLfUYE&U+F7W<)UHwySKfXUSIPv$q$QVgo_gx>~`=8JJUt;+J1}%+smZUB8 zE|Oe@l>T4RaGwJg*v*toIR_Uc|Cej2K~qLCdN86ufo{@XD(Js(r`-_HB* z@rClqz>IG33A>>Bw@=*5w}Dt)*KtwoOE%UIh7X7p|n!k$TzMlT@-)b^4pVn zlL5?ve64I!wkqFa>G*)zt73C*%wsK4KUSBj%hcuSC+Z6IQ*~wWIk?C3Z|VW{ka|Qt zsvcKQsHfC3>RI)?8m3-UFR2k~lzK&tVTL(QjaP4~NoopnZfVT7-DajOi&?fDX4mqW zQ@hW6S^=}rJRQNKAD+ydm1gG5n_0DrS|zQDR!ys+)zo~n+S)T(U9GBRF=wq;x#!j7teTut)4tZ$%Goq+gE^1(v-V5TyqNYYHk|$10XZAS?EaA_XTX^K ziq@`b*R)t{IW8@iXJg9f6_{oEP+x+DM*5m=V+_P72wS#%mq{tc6!M!Q6^N@#eOt`> zE=7I6h^Ek*iBoQfPn87TdbUbQQj*1LB~!@~pDBi7h%eQ$s<-$GOI$OaS8ahk zZoTTS`il)}SG60S2kR%&{Fx-u;L#Kz=Y;J-^671n4zFg4 z+wg0a$be^^A``xSC>*Mx8sZO3qsRgI%$4{Po-HdLz_;GwA-r2&6u`4S!hmn<@>p4Y zt-hkeyU!{Xt*O?U8PYCVSH)XV)3zyXutjcH+G;zsoxBhW-d5VlIbWqcynR^d2uB}Nx?-a|qx8_u zYUh;RJi~HP>C00r(aIZe_f=&89DYq12$#nzZ)u5Isxp+BT$?gl&dw@h;Cz=dmYLQ+ zm3OoP%}}N>hpHR2@1eh`{G<=lhbc#R zR%E8~8}qfZl#_C{RXHW?p~@M3q5h!~!hG&x8G442qq8Wvyd^3h zOL8gE8qFvQ#2D9=Ho)Jwu9XGepgcHfT-RrTV9u#RyRJx!D^Gi?q*P@tY@Mh?jhEMI z6Fvhz0qemg5Ka75a1F$=pCKw@E3SmUO(mWouY{jKC2Yi%cwW7teg>Q+T}4zde30bc zMkU5}l|)_o?+=zj0G^MT_YMx`UpeAK*)p;tllTfs9t)LZ2D}z94QL*g)J~ z(%1)nWB(Y(&J%`$3ml6g-5Bt@k*R2)jPZt2g|M2DuGA!~4cah=#&_Nof5PE}Bftc( z#Hge!2P?rUkZGJyvWydI3$Va=Lk$EA!G~ZGSPVV_>y4Z02Cxxq0^fkmU<>%x$W*t2 z@4zV9y5^beBGA;QCiM+ko-JjOM_;3PN= zLcloy9n=dT99#yGAR1f+*FahUPk<2q2_71mwD*d%_KLLiirP~~hE^F=1=YdRpcbfO zq-)KM6B=~TUH~n@i=Y)~4PFJWfnH#V@rJe%d_&q>xt20(+qr%R+dJ9bMYy-%o<_NN zR->YJ+PJBmA^t3&9<@-;CI4C!VH`-|yjz@OBQBGD2VoA#WBVS*{@~bs!aoTOzRa!w z6_f^Lfj6jVr0bPH74S6sHH|m)TEHensYX9(*O)+4M>*nsd^!siGZ5;h`i zOxT35DPc3h=Lwq=wjg|guqC1VQf3?Q5_lPO0G&V=;17C&S3xh(2f*_>Jg>hA27*Cg z2p9%NfKf&UzGE5sSTGJu0F%IEFqJY*2Q$Ge+T1$w06*#*xo!*C#yL9(;W<5+Fl7UIV?rC!D(;Yyu}qBU-*;8(agi z#AO(%TCS0xPXW`w3^2>MsxKi7COl=N>Sw@N-l};)l+~j^24lUaDNPWi$)Y3?lq86f z1W}SKN)kj#g2;UYxep@uLF7J&oJWxJ2yzrbjv~lW1UU*K7ZKzli(EvIiwJTNK`tW5 zMG(1&AQwU8B8yyPVclqgWvPvkt$qo<0$+o*AkE0uDuPO&3aAEZfSO>hk*$veqrpk8 ze-(Yq8yW76-c^ygBzTqWwG!^lkrwQ21~&*o@@9P8M7692eyIl0WuZYl8$UiN4BIR 
zThfs&>ByFJWJ@}-B^}w4j%-OswxlCl(vdCcwE1+}ayso;w&8T`hVaJ!)muvhHufDL z2N>|50#r~IJPlfbHsB@jGUxz0fiA!w3V18O*+h68FipoRl#IG~0D zYB->V18O*+h68FipoRl#IG~0DYB->V18O*+h68FipoRl#IG}(73OJyE0}42xfCCCR zpa5_02dzLGKpTVt4k+M&0uCtPfC7Av5ex!DzzDDr>;StcR~7nA2mPFbzQsY`;-GJF z(6>0~TO8^i;7{<7J5ws;umgGPKt?){j}GLK19{{?9yyRl4&;#odE`JIIgmkoJ&ilS zm&Rgx@Wu3!i|Hj7(@QRR?WkxPNWF9?a9=%f@ zJxm@wM;^UK9=%4McEiZkcz?c@2r}8v!;7jSs06BjsbD&o2~L9$?$trmXWbCA04>2H z@Htoueg$V}mC@wJ6I2G(!PB4?@CBW~cyi?r0zfy=9V`K>!5Z)xAWiiPa1#l8R1} ziawHxE|Q8Kl1hISO@9?le-%w%6^&ky%6Qz{s7V{HNlC*gVK^lW7w2eCW$<`j!c%n1 z!Ah{os7VRKDPcGz45x(QlrWqUhEu|DN*7M)!YN%ir3$A+;glkr+=r9vaB>|^j>E}u zI5`d{$Km8SoE(Rf<8X2uPL9LLaX2{+C&%IBFr3lC8G3M4gwdnkMKd{qWRFL($0OO} zk?irJ54;LbBky7ae2t`!XPl6L)JR5ZBqR0Xk^1qB6B3a8@ko(mah#qtm>NFG`KK5s zoW=qcf?e_~*2nXl6AHqZb-2Ja7qMiAV;jGO<<-IYv@ekZ^1vOC4}Rx-+NPjwia$XC z?Op)Qh*vCxRg4&pSyApxnFj8sgf*NUgt zil^6#r`L+7*NUgtil^6#r`L*C!@*?`38KMOa1G>gAIhoT1;2woxW}L1p%KIQAc65g z0^@@O#s>+E4-yz3Bp`v4>FwgREy$y->~G`T?ZoY1dnenw2+wdFDXX1j{~RH7M`9<_ zGsbHd*^dNK?8kv*wr#}Q2{YMuu$=?)*fxxKdd_%V1;}B&Bq(jf(2K_F-h>s6WTbqu zUIoV(4|_>2>4jb>ry^W9WI~ z(Z>_e$N7337zZYRNnkRV%6+GUnP4vWUkf&J%@(fR#_=6&?*jXYKL}29UI;uom;R4; z4uGz3W;f6s^aQVif!K@Rg6qfy9PWa{U2wP)4tK%f@o=?+mg5CSXTZ@eINAkApMj&D zaI_tcb-|r3xH2BDbitJ_xX=aHx!^h%T<4;XjiZl^qmPZFkBy^`je`?iaH0!Nbis)( zIMD^iIpH`b9Os1NoN!z`92XDAx!^b#9Or`LTyUHVj&s3rE;!Bw$GPA*7aZq;<6Ll@ z3yyQaRZh6d30FDcDkog!gsYrzRXkkff}@;pQ#{<{gp-_bk_+x}!9DSCj|;Aehil@s z8%7*`ejI&%9NgoAbK>C~p122jY#Y2mO93h<3o23zl|U8nH2bYU8^AmM;Sv{I;(|+D zaES{palr{LIKc%cxZna8T;PNYoN$2?E^xvHPPo7c7dYX9c(@=QE{Lc0UDUjjns-t2 zE^0iU8jr_jZe_;16zG9G8py2iTg<2p0UOvpMtBOjC3{?SHEPp`+RUK_L#U}O)YKMg zY6~^Bg__zzt>jQEIn+uHwUR@vgitFXP&Wta=0M#XsG9?IbD(Yx)D3~MIZ!qP%H}}X z94HzBHAA3g2-MsHHMc;`El@H93Wh+j5GWP`#X=~34yDhb^f{D1htlUz`W#B1L+L{( zeF&uwq2xJ~ID`_1FrMiOFLndnK~L~H_yVj0zX8d^=Lti>?}imUa`X~c6h}Ouh`)g zJG^3tSM2bL9bU1+D|UFmPW{`de>>LoFj{FAb$i#?rL;wFpG5sC>|JFY#^~=j+IDHv zDqno`*Mqp8jCT5QwHyV$#Ti3D@sZ#$c{P3KS;C@GVHnpHjSQ<88Q8>2LDPR5A(m!@ 
zSeg-HX-0^p86lQdqD2`c23%+S&)lVQgB&L?nnt@CsrYPwhZ-W@C(=t_K8yJe(--BjnZ>vsDFZo(t;&Q zVZjQC#wxtwh|}tU2H-h> zo~YgBS{;OPE?y9fAcul%wAvSdA7G!Wa=9v(t8%$2m#cEQDwnHrnYnt4uN=!OhY&I{ zLi*k@Q>bR#DV_>a^$t??4$|`uDdv%49x2N4=N+Wy9W&)9!Z?r#@<7p@DjIi?o_CO* zcaV^GkdSwfkav)dcaV;E$Vndard9(909rE+X^S4(j-1HxNFQdc`-1*}5e&I?l3N=& zm3nQYh~oShz@5cSD3Od#VdHuS$Fe!sMVJHfz#Whe?lLETkMKVD6BHPc=&_OLu#rj$ zBMqC`75W>S;$z%GhmB)=f= z7(iX>!l{fT-BC!A(Ma-%oIlA3rB4f^PYa_@3!_g9qfZM{$uX9De04{YhLLW3)T)lM|gZ z5}h+DyV(jP>3(b;n3^+_si; z_k)8Vm}94S$JS}iJ3|;k-8X8d>8=P>16HYh|Cmg5tL#g3JYBGyjbW)4Vd>F^6 zwM1&^xbg~b+>*60$w-32PAKezqQ{}=aVUCRtHb_waFXp%!YB|2GC>|Ctq3ZCDqt#@ z4rYQ`#&L3boLoA|p%c4GYx3Eap06D(jNTI7%^{aIa%qEKbK%!)_%)pz+u+q4avcHh z+2J?YhI7cZja*-X$7FlXA@??TEFB(8ho@xw&Vi@w@KYB2WTQk8@Q$6**eH#S(%9e+ zJG^0sH|+3+ow0W=ykUnYvMHero{(}Ohq|{@$9C%1PTfj5kwe`|S&>7XW>cTp)MYmH zm`)wqsXse)XQ$LQ>Mfl*vr~E-^<<}x?0CG?MT4tP=^BC-pe1N+BtelRsF5fZvHv+( z3-)sEuY~&uqd1OrmAP+kD3!#M5*2w$!n?3#Cy6Yqz`5-I!5i#!tht_0qYPnX!m7ko zCq(WmwSX^ZLtH09e?r+pyAjG(+Jo>_!roll2MmBOM#3YbjhlRl$XglDH51u?7fMfN ze+pJ(EV60<=mw|-bpdgKU?KPrECN`YRjkeG5{|D1YrtoKvSRy5r-l;M0~|jDj)0@Y z9S0}CDG&je1yZko7;q2V2M_4`${I=3Y!Wq_M9n5rvx!<`;+J4)UQYN4`zzW0n(g&$ zZvxy4yHPrJqjYLKiJDH-Qi#i7JBv6M$R+L`arcF{UW(c*1H6E2wbhIy-lXfT`+(Zu z8Bh;kRneaVjX)F73^WHXfF4}?3V03l27N()@CFzFnBBw{m5wbc9b1&-s6;p_5spfP zqmoQXFo$pn`|C(=Gv^!v!Ni>cXW)yLqB?D;HSMM?vftfbxZs`~afN5CVhClQkihm$ zdKepRr7Ch*%JMdBOS$b&*c(Y8+@8gDH%Jrhyq?7Hw+@VK5=GnNr;m z<7qcug_6U@Sg9;+g?BI$w+~3^afLW3L1JkKacn1mo9MmCJl85EiJerkm_fY$r3@h)$#uM$nRZdO zP}10G6J===UbF!(+JF~rz>7BEMH}#<4S3N8yr7^BUiX66z2J4Pzepz8YuVskFDPn* zZ@r+Z4G;RVP?cT_9gl0>Qi`}(;!@bpAj}2mPw=c4JnN-v=u#EXyPg7-K~*CSzDF)(gJ%f^WUxTQB(53%>QDc4W^adnMT;Ny(O`&mkS`xbUG@ zp@fsYkPT1G*4WP4LG_ohF-Ea{h3y!^Skrcz2JfZ8d$&a{qYoXlfVyqaDY_8)6ZR%V zQ>S)|%Y5GfR*{2~sIR3MELU z^`ugQRI>zGgt_#iI%r|ejiykFR7#OTDN-p#CZ$NF6tV=VlzBut-GmDLX$jbk_S!l zph+I|knJ!NI^;nI+1BzXa~@^Rqs*C;7$kAyw`!2R~ z0R6qV1MxN=oUPekJji!*SLe8k%PAAq~~^_DLU!7ooI@`JtrET6AjOahUY}XbE3I9(bSx1YEF7*7h0MVEzOBG=0qEF 
z(mT88nO$gIPQ4_k0p^0W0DT6n$tmiH$1_X$Xi52KN%@rG4!M=HOZntbYC-wvp#L<> zl#gbVk2aK#Hk6Mhl#eEqk0z9lepq~#DPPOuJ$)5HB~S%S1&jjGit^Eh^3jI!dD=3I zew;B2@+eC@fNz+oM&@K8Z?ecm7VR~Qww8rQ^ds`;oyjOWlTmgiqwGvZ*_ql~fM+7K!GPx?7-eTN%Fbkzoykbr#Yj4n zQFJDwXcwbs`5cMMv10da_0YJ`TNh$Ydp3Jx}Vo@F&cI;8qU;< zEPgzP!)P~?(QYOq-AtZuwK7sI1+Xhp%R$)0g0P7N@mysYY&^VFgV~MO2_M^dg2X!Z ze*gr;Q!ZlAT$Sc1Atm8+Jj0>^3;?|-2%SlDUGe* zJFpFW5B_QPqsT^dkZTSDT7-Ixb9piY8(@%n8iasz06M7`KsdMzB0)5`3a)|TGadi` zXDu`IJIh%5x&}}mc(jME4%3w1pgUzUnc8hauoa43|1!2<+(xlz! z590p>22ZglKn3oZlptCLGb!i~kL^c6dM&ux7t{fDL4EKnXb2jErr>$d0<;9JNRKCO z^q0WPpabXxx&WTS(f{+TOAz+cAbkjM&$^Z0cMoVkr7|Dm7^advKH} z9)yA{p0muQN56}v@jLr}7{BvI0(qAzN=?}&L0j5rXTokA>w#9(hhxLp9|0zSNyOpx zu71lKpSObVz&7wbI0k~jNpKp3fOCL*s~12xxC|meG`I?`VNd)9oB`xh<2=m<93Tf6 zqO=YrbQP2Z)r{ZurwLnuHsB@jGUxz0fiA!w3|9bQra`_3g36P0j%Q)#B#X3jT}5jlS55oGry{ z?PBl|_!uk&%V=XaKs-p~98YTQ2>QZhw3{2$RBvjC58aZ-G3X4Fj91LG@<~f-5&5K* zPg?n;l}}nyQ^+T+eA3FtV&i^p-beK2sh566pm@_*Ee0Wds~CaGNn4ANv2G*-4 zMuO6eHjeE_=?Iwh;~sI!Fyojqj&moHH$)sUVhe^rajY=y8m)bQf6#1(c-%cYlZSlof_ifiihe zrn0n#a^$ZaE#fE2_7r7nN7>4f%kh-2JLM})PCF>C7UV$36UwjxCoSfATFfxYTaxlx z$aynpdKGVyl8kudGmmAB%jR>BJ(%TsgSmhKgz|aENlOkEA zNT5O3;?_g&L-566DC*{w^QL{l#_Z)?tkP|b&D202%JuJq9DUYjjpgHwOMJL0;v&;32Nb;2M`X zl4?tvaa(B}ZoW{;At|tO!fk_~!y$4Oh9_YLp5J$k!(6wQT8ZM?^W1L>Ih9Nb9{LlTLu?MQdC0t2{C={;Qe3-OuorqeLl^hoNpP zM`Jk}%TcN8OKQov+H=&roTtsA)}%y{*Lu+oN^@-{*Oo1;FSpdOLZ38fA-Ou0BMwrP zqqi)sTL>3phZAVT)I$c8NJM@lQC~?&D@K1pr>4)bmCIGRT*cddh~Z5>a+a4mUC-#m zif>pc76q-yn>guRs9}+N775!3@4pf}E!Unne>9+~Xg}TY3X}d|Pn(`#HBC=2`gxW* zVlZ!qddBntt8e;%@ii%Ic+WD9IVKtl`L?Gf=6jx+V$r*bSJ*X{=2-ON;D>lDdN1Jj zWy9Cd!P1_0Ic77X@HeZ6M- zef2T@zWSPeU;RwKFTB3+`@-uBzc1djh~L)$)9-5_eqYOZtJ+Gwi#W>k`x;|O`ooC z)2A!W^y#```gFycK3xf>PuES;rz;Vk#EK#bzr;!+8Q;VzA_f1%YT_0?iZw(keu_0k zn(6n&tcmuVuuESr-k*uD7k*;+df^WyeZ5rE*Gto+ua~Fk>s8A1^(t-pdX+bQy=t4j zUUf`guV+kOuezqMS3UVwV*J6RmsfMs%c~_`UKjBMlm1Djf`^z2$^dUpMwkJLvh zKkB3P(aH|f%WJ3U)4wax z^zTYC{kxJ)|E?6%zw4Ii--XOoW|-}>s@XoPn(foqY@fcg&$_hLdMq{Yz^qTN*?>i% 
ztv!qUZNgF#J)tRAHe;zm`{G?;w67QN?37-fWw8jh;heTCPvaH#628FgSfqCtnm(R z+)Lk34^u8E$b~(O4)(IBcn$wb-uAJmrlcrgN($YS6dtCe@G)gXc_hS5%ALyMgPgFj zmXe~HDJk#+WAQL0MJ*&nfvAPFFocIGFKU_cqLwKyROCe!w9Be^!9A^1<0+XE$c`qQ z*;HvNYAel2Ekt?c1?2@i$6De`TFR6urA(Pp3Te`Xqkj03Ry5^^f*cvlRYRCN zD6b5~m$ahkOIlS~qAU@ zX-2D+O|Mo3Ie3;_onzeUi3j$1wnG`edg6;6#`Xoqv7UHkUt~L+@vJBQ*_YV9%(&K5 zja6gGr<9TfdjEGiESZH~;N+KmGRGZyjA- zRsB|VeY@)Y+J6nd$M1pP>-WOjOxb2Zt2PT-wOP=rHp81goq|?v7_@3p(5gj2s}=>V zS`@TuQP8SIL94bwtMa|i3hf=ELgs^ag6}Myql4s70Z+79w(wm9T+!y*!XJ(nJ`gQ@ zG^07)rF+y-dZ3XT1&z%ANoeForjc=-DpR9&GEL5nI+-uFO{W{XSVs}QHI&( zSF*m}Rm|)(O?@r$>*P8tf-aL~xLz;UbH^2|>)e3(=fA?|N?FNV&zs~Xp7dt9nNYWY zO*Lj^=U>CGl2wtKmS+ieX7utz%;<%$mTF?Vi`ASv$lY>xRAh73iSu6Ytj^3_rxr1L z{eJGdM%LhGt*piW!;-E{_y~7gFYBWwvVmEG4Vlrtkvl%hs?M=I#)w4$t;J^U^0+)6 zCGrI07cH0{{3LhZB3qbM_!OfV1@g2!9kr%Kc^)|?&!aZhu0WYL41J9`ti|#=|5z2u z8;q8#tX~8_> zFNuwrCdmH+Cf8VY%1%=971*4YU9yXKc7xG*`5N1(G40kj#JP`AmI7KY*4SiiXNelK zzOyn*uYuC{(qb*fW(zZwp^de1lrW3GiP?u_Y_3q7f%0|M=8Vx5Y75Z5&e~F2!nXtM z^V%K@Qn8kR`guJ>4}tFh`scMHmSke>1PbV}2GAM)P|!fHhv{MPT|foB{y={Ke>mu% z*CX@@_^zOYUXRox;g13>^w<&T27ffDq1W!(9lnS5fImi$f$yn3;d^N>_}X5H1wUFx!;jH1@XY^)(o2Yntd{_t!z|}woy#a>q0VErbFt3n@3%r-z^vzDy@0>q z3at+T{(7-q9CcuA?n|Q9Vf{?i#k!c0l_k0aIsFO7RhczTsFk`hQoRX8)$7elzgut7 zTX?SD=x<2LDqTeym`9E!xI6R?Y**f?cTxw+wH#OGltZf-yHUMM?;=iSmP46Yj#bio z^;2F*x(1(XbuBZqAJ7Mgff?t-NneE2ZqN-p-A3kYAFPiuhFYkPF?ai5<&RzX z&AK^iWAo9mNd2Tf$=K%>-9l_nG2^n4^S%ozc;==fXKp%v zn48X*fa3}OLi?ZF@?2}$<<>dzcb@Ib#Akh^=Z-c0?b;5TL0Lo(ra8TpVtPTfw}cB@ z9n^($-i5_G=(n)qDk9bkqQ6uKZ3fM**_8MCy}4Yv_ZNW=cQ@CR%k0l%z4Dk)mOJ^) zp2eQRHc3xenm&Ks_!|(u748kPehv!fR`kehD+%=5#A0p6VY$~PP?NA!1Z#qr%r>Jb z=Yk)T<#VELip|YjmD00yO46Ljor7a7lE#`B9F$FqNz$jt-`jH3+)Z9W>gsN-NLfCm z`FJISNo!0lgyoTK`O->5r_%k=_SA4&<*+IB+^C(?t#u8zzp(!dxz*{vx3%EAp%b1O z^KH!zi~ zsSe|_wJKi&>p1_@E^1crB1Rie$?x^u*;1A6MQASDi}`A)y{9@WeQ(o5n!d{I8T6Iy zh3qlma);H?+K1KT`i|Ou^7XpD%;;F3FvxQ6n!We-2Ir320q8BGo$DU@ zIev}zjn zT%H_0h%E(+fhpmdm-ra@(|&ZAK;3DZ8>zwwZ=bSNcwMp@pzj9<4pd 
z)j#u>t8c0PO(khvNo$D3Vb2xO(iGMlsgAbxI2YGHdd}T%|K~Ifx%yfEx%PWwb=lR_ z%d{jc@AF4mH?4kJ9^~7YG^f)0a=$Oq zu$KF_u$KF_u$KF_u$KG#QOn6a=1-1xPo8}K92ZTQdG0x`V9Ko7=evf}=AAX!HAQfi zYZqvzK#!g|>w@!LpP92K&vYjSIv~(e#Pi(n+2%TWj?oG8&!0TUojrg4(cRs2Xb(3F zdW>5D?dcXmd$~o>-fk)MSai_n%FhONvCgEpk`5 zWp1Tg<;t#eH#}4g3y2Kw| zHf#v|$g)AWk~g{1P0QD6n8~aS6yC!Yp*QA2jZ*YhVK}Td7;ir9O>?}}My)Q|yJFOO zC=RV-)bvWOw`=tPL_bg~JzJaKsMQ4<={IUR$MMF8`sLnF&+=-BInGpN)@8P4wqZXei%w8|do-fLtmt@@+{mnVtA^v%dzw~-h_WIK7Id{Dzd%ZGyzBPN^m_2V# z&q>4Vxo4UeNssI`mK-dd$mi!YH2+ec$=~s7 zdeZcU#TtiF`mUJ2?YqHursLbbt%vZBr8@H+srK>6Xt(>o&vnmHi=TDBch9(|-BWIh zd(u6@h++@lozcZE?tS-1_nv##ZDTz0ZTFV@17nnLx;Iz{?=|ZFEAD0Yl6%p;fL)92 z?n6J{U*H$`3;jI*Q(uOai;o$({FD3CeTMBmRw~2d#SZLU>~w#1U%6dwxBD8q7kk|| zbby?GgSAO)vwEy+V*8>1%NIqyfj{0Kz*uJ^f1rMa6^tgnsc(iIj274dY=s5Hw!WP| z*v7{g9qS>-NKfe{z2#WxBR`bmTNz6?_ikFsmU1iUH1(`l_exsg`X*)mxjjL)aSA1R?2DVrgh+>P?}#@Ir2X2tX<|- z;&sc_x2!4I<~3z&eE=6T;uS9lS}*+)ckuuuXV;PRN&VqJ=Ld@|@riZ(lpsM`%MYzX z+&U}qaZ5)0&%)x&*?zL0WcFVUD2aN<&ErmSm$+-(BkmUuj7#IO@uYZqJS$!pUm0H; zuZ&m4cf|L_55jc%aa?E+mgGI2a?Un^Jw)cXkDL7i(=L{Ex;CXQK;i4r|m0dOy9U-knV)% zFaA7|WKuFEna=ejhy5Gq>_GXWD47I~(G^=dJ{b*nlq_{Fmapgt*5OTR!$Ph#wbgr@;=3JjU$4vau@m@D45>Huesqe44oUok#u z8x^_7-DbDRxA!If5Z}Rf^qo97W8%SDPK*{}{c3?f$@la9{mK3mKY$$n|5b%=`aig; zu-V-z0vjF(CVU#0a4CPp4(1=(q0un@bTnLe1bFZ$(BLur@pA@<@OaSRGeLoW5n=Nb zD`8hgS7FJ1S+tygXdjFo0{2}PJrdcPz#F4S`5$XjRL5#)^b9KmXRUmK_A-i0Tx;t zVWG8&G?Nz6O4`W5(q0Y$KRgta@NjIk9#vyw^>&arEUe0%QZ5xBajT^Y)a)_YWNYYx zlRXJO_7u2S*3KywPM?<-RI=RF68nJH?%HF?&Mb)ji$Nbx}ji>#Tr!Q7mQZTeq`fa1>qn!;B zIDwwo40>S~(%<5XK`sKxTS%X4sa#FCE5JKf5N8m#b_*Vf(dSCMeQA+cn-7=8)>-h6w)ty)MxXJb`hk`fzUaSIRTSv z2i7K_Z(Raz))ka216I}toNEBcRcsV|YP3!OZE`ve#0E~M>nuXe0C&0&bZCJt#OFd? 
zq)YW`T8Nd%mg$Z7xKVEfAOTl%C^G-2)HJc*?wClrfrVnrWS({m<}JJZpj2h>v0Y4}!!$Lyvi^mA<=}dw!Dv zf51*U-(TV2doV*jun!9K!9cO9M?L9=MzHVD!}vSp2>wComDL%8vKqohm`;xd`5E{H z4W*Te{4_s}_GXTs1IBd?Efzgk_S9{ZpNQtMuU2bWYuZ^_Pg+?*)@om^btSYRCF!_( zTUtOzm|@h#5wx@gwA&N;&tWPpZsD-@+TBW~) zR`{1gs4Dnsf3->Uw7_2lEmz(L?)W;iQhyC)wP|RD{~(02|B0*phe7gjkk~&q)$+DU z^q%1QMR5IdaJ|nY`YUL;zYbbSZIY;x_^vl!)LV;hnYn|w+MREJR)SmFo!3Gu{5MAF zirTY22w$!LZmwcES0P0~{;Exi@YVzBg91c{}j0&Lq{Z-bVDZQEUrhgRyX&???j^HUA24w|xB|HCAH zdx+;hP3}Jml23xc+P?v<)PI9k=~!rmPJ&jWLCwzu=xUt^t<*E170g93*YVJD z=BAkIPoR}L4O)S=HP`9Ta-9KPtuvvOdJeQgXG5#?TxhwT2VJf7`*Hmpw8E{U&Xz_) zoaN{c_ei)J?yRm1cMn2K-3I7bXSr!9Jt(q2X(M#sk+hWE`2Vf|OAE&{24z+>SQj1b z*Bre$gkJhE`b_pe?Rdsft*>-=w3xkXbQ$}R^qZFQSD#r}?MBb(8vg2A8QsE+?5*s* Og7zwfTI$9`zx;34o!sdF literal 0 HcmV?d00001 diff --git a/assets/fonts/Roboto-Regular-fc2b5060f7accec5cf74437196c1b027.ttf b/assets/fonts/Roboto-Regular-fc2b5060f7accec5cf74437196c1b027.ttf new file mode 100644 index 0000000000000000000000000000000000000000..ddf4bfacb396e97546364ccfeeb9c31dfaea4c25 GIT binary patch literal 168260 zcmbTf2YeJ&+c!LCW_C9{yQ%b)g#>8<(iEkL(v>1zZlrgRDjlU0dJmx&=^$)IKoSrV zsZxU|AR>z5Z9}l20?D3y|Le?7GJ`(v^M0@XnBCdk%v|T{^^C+MNeaV3m13K{+@$G& z#-8btTz;k`$-SGkZPWhzu!d=pT=54<>VBbF`;Lt#PMbAOk|!OIq{t<0+9%arH9dQ$ zB>NA=ReJUr)@#J+`|XBFa>!jtvQO_bc1&#bosRXATxJBm@6dn5fMMev_1q)Lkpm@( z9UahX^a#mM3djA%6E01XNL~&(`)Lu;v*9K>98aP zR2tT6{0K(_#UJNc_{!c!Z zHiyUi0&y-VDU@(;Ue%q|1a+I5&)Nmf$Q>PAJ_;}cl79l;-c zoIdo~XNRV&S8Ya8##8v)MS;?a$X>x!Mto9awqs zs!N0P_4{LC{>GByaS~6fl;iyg!TwH9PyrpCbj%KCrRxO)l{KBlJ3TQ49vlNCWazs>e-87}kwAG)TIKE@$ z&Lf9sj~e&(ELLYvyYnBc$i14gZ1#*yHts)fC%<@Q^VUxyzPJ^A@8ZJkliut1o>tvfy;HCik+H8mvxXkaO6vErLp^B065TOx}dv}4AsZ9Aq--#xEO%VwQBt>`2_ zzk}I#?%+lAN%KyfTQuv+9fRaEgVd}UyZ2-?o4I4hd`Ihky*svO-M{~9MOS9*+Bv`3 zj9okC+uQW()3IfnzI{6U(O4bT7+R-a@jdkq+exXClqe-jbN+=NDgZwf3=t@UlQP5{ z@fCoiwLCN6Gl&fN}^1L;6Nwe)o_s{CG^0hX6%JhxJ zJ0Fj3+~k{9BiODolctYdq zi(foFIrqR6<@)QZMzAjY-8Zwk@!#HHvHbgP1bJ&|nVO;=k^-S~aWS%LAh^Ah;2uS2 
zzQ{P2+XcPnN|raUOg=c54`!LUO7MQ3!Y=G*yXaaK`E8aWeE}<9hOU*ZmKqhhu0)7V z6iOz-K6}s`>cKwzcJmqYcP#C94u4%mj*)}qL*V-`36>+9mBK)(H#JTU=4IFqa?C2a z*AiH^vCq2e9J+_h-wccdcC~o$MF5G(KU;bEBSre$;clYBy?ByHUsU10k~&?p{s=AB3TS@ zX1hvZhw92MQ+kS}IAwRdtfV@_lIwDw$v)g^5?mHz8qFjy)t*_8C<(NY;rQz9WAxduWd2H z#>m4!lKEKW@>YRVps=s0im zywy2O`TYDnxH}W&FJ{TL-`Uu4)Ux#pK7RCB_H}-pcLjWJ6yH-G1HJ@lk`7-m)*fuE zy(~`3l2Vj{g^rVww969fu5FaqNG*xp^^n*oPq3BegPjmA82{{qQsA}l1aja!Wu2Z1 z1vr{@C8(N=l{m>NxOGzk%}CZ$jjimnoX~`cZZ>=VjLhQki*vjuF8wrV@c0?U67SE8 zb2Hzby=dL?`AS`R_9!OJ9r@mOH$Up3)kyHXbMn8p4~?F;V8%NcGI3!lsL>WY8vwn~ zQeUsdLl8=W*30}=f|ey^%cX1Zz+GkJ|7d>pKzywQi(e7=k!~U2ESbf*9Lnr-=W@M+ zEXqVzkDgN!=#MtEFgoB|si78wEYNk~kNB5y=k7l-3g zOZg}7`!$ASocZaGoB0o2`&~=MPFucl=7c77dPYcf+R!*o6{ojl270nbCX_G zt9ZA4BzG;kr`)hLe{$GXCJQ=v1aK1~q&^P5sE@{xpmC&u9l>_QX^H-kM7~5wRwC)3b|ndXH0mdb<=>ld!u`gnpIrz ziFewlUL)@1=l!y3?UPl@XG~wge;PJt*6msI)RbYnYu7nC?!&L|936YCPVL=858t>^ zw0Yv1tVfF$tL5g589sOJ?FHb1zQx7LBeBxTQa2roA}li28IDDV(>j%K5*Z3_Bt^Un zx3a2L(Ic2JuNM43?vYp%@q{bVDcRhq&>B_h!Xz3Vx6+{A=ALgK=|B8J#*N3^!{4i% z_}yRpe)sj2H%yqgVzE56Nr%aIGM4=`nSaQCOyiyT1lv0G`zND1v^;e8$m*5(#l_NW zSjJ)M%g~2me@V;%EBCiDT7qXp=1mA@xdvTp*TFBJfxYgCUnb%=Un!%RU2+CV#xI3A z6TbwXHJ45(6V;aBvnUgv;ajMB*lH}!776nd$^7I|MVFw(W_nMuNz2$o3bmyywph8T zTn1M;a4$$ddt{=zz_YP4y744SiG36May^PPw12nCQ|5V0;-en;5?e*1IELtq+9SeGA zmoIfBG^sq9EKPL^$^Un&Ch1lUCM`YP=l4ds(?D#P0S8>-(pb8mT=&%(9o`(&e{zoe z?V%5^ZW-1h-xpf188@%PoF2mljT_o+%bD}p`*#m*m&H$%#@d7V^Y&}DRj>n%rJ<6i zuI{z?0cJmvbfrKGt?Nf@8k(fp{6guSpELV8xio5uEb!EIW|ud8f`GSLfu~whw%hb! zs584!=_#=<^saF66VlVdXjRdQ9V$3IOp1$FWrsaXrL$-e1jylGVKC=v7_&#wr|IDo z1=!C8-8gt8HEn*&Ma#lNCmbKtZfe_<@Z}>H*u!}a*FNTF4+I7+VTo5>KlnnG1{ViC z;aTqo1>I(oA3SD#_Z9vg(yq%3!z;5|&o+8%HT&y#{=?3W?SHtqjVUXtH}qcn{_6v5 z7Rx%rGyZzSm*>}Tk4~(6hwWhHSvdRP!PoqCzGP8W{~rGA?~3<{D=Q!jtq9%efGzEy z1q22Wt^%A$6zEJ*>TVluAt9KA$PR4VNhA2Flxy(#Sy)*M5T6nYD{vu6$12K2?}oXj zuXZDwd*9i;`EqJ#Px25Q#dVgRpW-CMsVT%qQnWh(3?w5yhtr&vuHGom z@7(8{f4r0h?Eit4iOw&(BlGZ;)7qvz71*Wk3)v`^w%|NV*~Y!!?OVrxEnN5u|6%C? 
zP@OP+8ki20A`LJ8U-3-13o=0o%m$a9>Znx1qT!9G4#fq9j%9)!R@A^Dtwzr<#N1oxGLbnUSiYJ0kZh=o?NOzGa z{V#m-KgUs8CEW&BN;+`7(&b8W_XDAoV(6t|r8aoUu4qO^6);nLWjPTZSX^B-+AYT+ z0Q2z@85#9fOa8Y<sEeGf;v(VBKC>o+%if*A;M9ATvq&@Iw-49&$|H@w; zsV(-WCi;M(Bo2yOM2w`QG@vJo$D$sN2Kl@h*}_5p_SnVH}`R;HQh* z{cCDkTq~K4%ge)0@mHycs4n1bsFbAtmBlL-E+#>Y2nmj*Nl3r|$u2#ErY8&2mB9SM zE1&2cNO8hAqtjEuaUFXB$?vYMy{69 z>(XFpqBKuhgFrY}^6RcWM}eK)M%uYic$&Sby_3DaeXM=9J=4D3e#q|M9iTb{@<4Cq zmdk5E-kcx2C*;BZmAB>a2%xaGT;QEjbXA8Gae@a~%V%^*|5ZlJl2N-(6%vDFHdxk* z7Ur*qyy@4mzlL`qQrCaMtA#X%@C%}qSa*^bkq;;1!z2<(&7r>ph?m-R{N-exA`yOk34(%U(4lXEO76B7P#bi z!I48(l&d+p7ZiEdHJ-n77klo~pifxiJ-hhv&t#^sNdEI*LkjsF7V0IBfounfNC2u> zZM1+05%$1i2=aLh0tp6sjNnTPRD{8PN`1rXnT#OV5om&LLc+l9GslT>Y*3zD_5lm! zfB(&Qv94>jZe7gR$@RRjUk^Y2^t<&-=T2Xz0Ip%h0X92u7%9aAE-q@WqokD z;IFt0xC~~}6hD#Pby>|XoW)qP>O>aPVRKYL=tBDQpSX<$YT4`wOr60mHg8*kUk~t` zck$T4E6No%hVXlpU+#2a!o#o<9Pj4&pE3LwO*nqSzxLsHCvZ$G8G?LMAI(-qByDU? zPt^bFl^Hn)&8d53PK&M50)>Ehz&BBr^$C+jh_^csu`}HjN{o|_^WFLEo4=U<@)@kt zCGVRoaq+IrS^TE_s`q`H=j&@3=jwVhgXEu9OrEm@6;&p+g>4%JDkMmKH7T)bi3C{; zfl;RN*eMHxV|GX>G+IJAVd)dBab-DCx+(W`v`nESrOckL*N_+()tZz9xzpcwSop2X zpQq*TT)k-HDmLU|AAaxqOb)el;@zw*neyCbm$UZX8FOL6%vDo{cb(LK($?YGpN&5I z&dk-5uf2tJ)d59Tfg%pW8dw%oqMET3i)$dV#>CVxud8^C`>@Q4y@Sxk*3vt`&FGsZ}6?2^L~FD1ed>UkBHx|{LhTgeajUHRC)&F{Wv z^AyEj;!m71lfO~EE=t(2f8Pe>3&4N~K=lF!yY#FkIVft(@tJ{1>rCpT4&!2#Yech^X)ugiio{9}3|O75ZKY zz%4bq{t_%+u>R;4UD3D@uPH9YHEc7rG1 zQKrkaytTaX^0VHv@@@GO!f7ZVJpxGmz?Z@}T8L%w8VpE%!0GoRqnIrBW0P<4fIJ>> zOa4s$qG-7HjvS*brR#UX^(W%`{!&x@`j$%?+-_!dO_f9xhzy3!B+LFbhgc*z0;t=k z#znH{lotzcDV2&ID1WbCzeJtBVIkdd89yrr+NVOkDoaSsQ*zWINS53k76Efg9=05K z{5YS(CfI&>JU+{TmIo$PMLpwLz^=ePQSF^5WXKazsNj&Q9=WH-=6OtBjXyujW{CSD zCxc(JBx*V^ErCKHi+dlA+or<3@MjbG?EHND)JM&;>=|_DM)Kzhd?rXzqD7KQ8NNVc zh?8KKa2p%x248Hv``BJq{T)_qk9vexlCOK8!PV5_K??P3C`N6^5IZwsYS*z*dMK-C zsIp=exl(Ft8JL#n|B)vtZ>Od%}OftEDBq%pGa{d+mEP<^1 zFnGN`sjX3Mttw5{qMxCvsVCa$iS=2YXb567C7B4V25*((m_$^L7A{$!ctLD~Ket5b zVSyq_hYd1?e!{;ne(dyVeftlg?EN4D~im0g?*UvGZ< 
zOy}OTX41m3z*z|THu`H}<;v5V!<-%kYxdI_Ncfw^vJFCrWeYn%%eMIuWwn4HLEs>Z zXG7&LQ)vi@r~G}Qg94Yd*f5uq%~B~oMW=3N}&zdL6Hn|CK?+1wA>c04d^h3tC7 zuP&Wpm%JzD^K0B|`|#3kUSszqQ2alj*ga6JqSQ)rR*C@(y2y%jo&mDq@0fXqoFk+l zQH?^Q2a~$T`At55V~=upEkBhyGfb@>G`hl+m$l*Rd=R zYk+LH_yWrY{F+Un43!ojUeJ1E>GrVZo+0ch@Oq8SlG+j=4B8|ylDUTe73pTLdRzu^;Qg=ZA2e2FoJP+0U z1fB_jhDRm6 zdJoczr~x?Q(2pX&dW+wi^yRdxKY88i`}2BdB#+GCpO452lPmdUM6kHu<2QR3^Pjl) z)lH|`HtupoIrr}JkcDeWTfKl~owG+`Mg6qUC=yAXZ^TMseG+b=h%nDjuaQ{WR2HH< zt0_eU?db_G0E1Dk2#J2I1Qc-)1tKG<+V=gPJ-NFZH4I2feZBYh-z$3-58rppmFYjI z_o&519f9|ryp!@f@Lm>nVYU`uC4smG4LpH9ePjVp$f5zDh>#kw*7NU1_A)k331 z?E*^2lw8pw#h0Y7Oof-FU^FkQzF>Ue*Pr~}xAXAjS@XJ2Wp)4f;L1jJf9)rr z%>pR!uOKTfsihVW7A|Px)MZ2%Ut^7iHz;Hz1gbfN)~Kfh$c_b=H7ZL>j-_yzl8AN@ z_p>IGPO;8P4jVN5^^Am^9OZ*me2OBHLH;oaD^&)J_7_)NQ0 z)MFg$%U|%$0~f6WAR;`4RtU667htxE7kl15`K(F2)Os1~%;E*G zWT_i`j}$-^ihi0VT2O_G#Oq++a38M=1~YJLm_&=wgCAw89FWl?b1hL9A9RvrwDAcn zcAN6m;xCzN!kuNe_=DUX3l?tQwP5Z}IdLPO$1m~V4TTF>-6H=3H@`fieR&hmE#N)X zN&>oa(g-bFx7p#PxgLuoia6B(Rp8Fhz5>NU`wHjCF(_d5LoD=odKo3=!tEj(VR1r!I+Zuv53XMB$scpp&)U|x z%a++2oiy(zEb zZ_4Xfh;B4uYKrKnq?X)Z(Me|(aNx(B!mQx*#1&A}Wo3&rr6g1~Iv<|y#1;JmdgqHG zkL2HPYjbD+;qP*%_3k%nFpJ#V{)e3DXGiAP=8qcm4vT5k{)G->+Ri$BY{e^Yc4_v~ z%MChB=)83Qf424PKCC0H%fI-Z+{xAmUQjPB#N-8ufZD*RXnrtGj0_vOHlm-8B1BUs z8TIa%icoMLsG%o})EZ(|x5&?=M}id+QpqE7u{r0?rM(#YY>Ot7-#&H9)`&k@?Ctg9 zi$R$Yne*h0i_wq3qzqvH7W9P^x(oS_63SZ`)#z#v>dIn%L?|FUgJ2P)KkXS%VlzSH zj>vt1qo!0HdgZ-?Ea&W}O>;a$-ud{Hoab%w*9IlL@HC)_gGtE+H2<10GSDPg&p0Vj z0Fr1*Ey)<6<1^?(K6xP@|6!rhu<*35sjH(VeHCwmq@J2h_!~N(TWDh8bBhERHxqa; zbhsu3itx;)zXXUEz#%e56b6TfC#x+Ba`>rC{+rOcl693OMfr;;7;=Bm-v6recSc*?=JCQ8Uup;Xi9t8 z$Tj_=cb1Y=?B$g!`S12)1aCOt9p!`9=7SgMkuph|D^U2jt|TqS1$e_u@Y=$NtZ2kd zLko2}V0I$nh(gIdIWnGXyd(U)X7Ubvq5_g7RTSs$b^1vvU7w!%x51!hacke8j%#rsN-m|@8 z#1jlt7J=xEO@Q9&ph@v=!6#(%g?DN&Xi2)+QDEj#>V-j)Btj^095DwIfxaQLtrDpc zyFMTygQvpu0TR7iL(iAA?2CMf{q&NY_s^co&dJQP>*`{Qyy{uIwD+;V@) zD#m^DRrIHsM$&|#6Hihp_KK6<(JDL*xlzk9jJy^TK_cymNz!`6uut#+HB6F2!AqTiJ(UAyINl8yk7miJO 
zG(;Q284eZ^6;)R>TPJ{R?P{BiS1xayJ$?Sb5zD79-*DpO#+5Tyz1e^9%%Yy7PkwW9 zFT73S0{}Bl;oST z@|B?tqA(#RiKx|Nw+w0-@evFXRYWxh6H!n}JD{z!-Hh4+{Y|GJ5gLKfJA_IgTnacA zNUgvNi6mi!o<@$H{)fkmoG|^59DjM1@)=*sZ2TyDnIFyPAF&4b=ip0kC}rhU-r7^P zP3Ff~#jhnH++dnWh zXXpGyo1dM-Vs?$J=e_fKtG2DuX0Zx2T6dVw_J7#1PDbCIXP$j-@HrO^igNe83= zX8=A35z~*^E)xS&XjFQtl^4}JPnt73wsbPhQw#E3dg?PXWUDD(W01<%Jzgau45I~M zXgaIxruIuz=3~+H;Ol}=d%U+{{fEcbZrZ!7N4GbI4t?W4-MtuJ3TKU2*rpBqm(82_ zy^W)fuvTm;YkA}VKY02SKX^#)xO(%|LvMPnZe7`@etYncBb#$RrqE||Y zrRBjv_E)Bko4#Z3(8*2OY~DL})|zsBYxOP_MzrrL=f@{>nml0m_>?(m$w33AFP_a$ z_G&k&YWYR1Ve%Ui`lS0ytCYUV`%(g1_Jm6gG~&Np%%Sz(VdIozN-X+<%8SY!gHFOc znI+%^ghDAP$8x=sl!j~^^V1TOFa4T?&cbf#V8-OSrQB#EMJ(E$$z6+%bSI=FCL|`( zhzyc3?$@7YywPCIO`BQ7`t|&tU`>{{kVUNCHFY9$Ee%neqdn`IcWK>sp8WY!+;@h! za~F%>yNAUQcmB!uDeY!Vne<}aHT63sI4kG4da6_9#%V23if7UyTa;4EwhdlaS&gaW zF^EAkxB$lNGpI#H#aiB;@+MoHHP?E(?fd*k#JPFYi zJ#pkAid0lY)by2u2QFVea8PD(TFaJc>8)C+c>~w29W*#IGpgBh^;)$V+7fr}g{b0B z^$*-R6#e&NHV>X#Neqq*1Dw`>%<54LZf+^Dg^L-~pw z{2exJ2Ya#TL**r<(<@D8~q?Kn;`}4ckV9%5m}@?=DtjSfdwOHCw-f z`K=k!!NV5IYlpIO{hQRO|H^ZtR=o4(z#(mx0>TFJ5_t_EOpq36v8D`-1wt_h1_(8& ztjOa_Nr#3@??{U!rMuP;!(fL((SepkXJQ}>5IagC)&fHG=`l=%nPeI1RYqKnW1NK{7Q3BVqm>S~hRk^to2+-<>>nUDL)ZcW2DpzM;)a zO>6YS?;~yvliF#)Pxs&$(SZoxjT4bh zF*1S%E1Cy4v_MC&PE=P^lrN=1705(r1lFDn7;~mU?hgO%yO*~^(%L)c-E~7m1A)DlWlE}b=uQSaE4^2>US9Fme$qZ)c?aNmjYTJ`|=up>TTrXD2``dIKmysefF zc$RWv$$%#;kplys?7{jQtWOxky6baO--4!@C~Hb0bX*YX(~UJn&vnDcc0Of$w1D!W z!jCb0r^zHk=|z{G3PcjK1C>ut%sVC?U9w$%2Xl*mpOe<5e#bpAj@i!}^d+;jhZ?DN&%)w46l}i7{=r3KL% z9y6@(lpOia2Pdy>8rIl1VI=Py{La|?K2?T|9@%a4g^%BVZ~w^F%UFFl$2Du92q_o; z4rF%*$Av;K_$F$NAV@H|h2xD(pN2L(Vs+P3Ea1xUc9g)UOiwst z>F7~q;1t#sbM=SEVE~}TIDVM59LEpxgE(u;+Dziv;=nzVSUbKSDhz$i?_#>>9x_g` z$ea$;)N0k~vMPDSbWHHcmSyy;1e@iYB30@ZFBC?W7kw(`+B~{KE7O(CBg(KjA^<>p zO?rZFb|yMK*%1|Pi-@L*2YPu^5*ZY;(Gb07Mz2Lnj!{SSwG{&vZk#I@)#xp!^xuxg zXeIJl?-$)BlypbGw)XoxHn2VQM^D*Se1zZZ^KhY(F&yo?!G~rPEp9{&yfT{q(EA7O z35LG_3D7IpK&GKf1os$v%kX2-%Pvv@=-P7X@6fz!o*PGpp{vy_|D7_rR&Ct&Vm&f2iHTgz9zXqz)O`^25&a2X?usb}sn& 
z{f$%3H%acXB;%EhT8#>8V{5$eT1wC5^V)U2+~JKO{0s14>*9O%$*5da!?a+1>6|9( z5eA%sTA12&dY<#~prx~|BJ^2B!`@qDy(HTvS0q{2f^4FjEeI_>L6?KzZJ>L^S-Ms& zJV-R0l+%A*PrP{Q;n(#p*F(G!SNcIcCK5cA<16w@YKdD7|wCX^s25FyqB<7VbFu?U!G@IdIT|!@nOH?Wx;v z-=I%^@K$x~Te)IFQlkw;{>?Ykz5CXJ!AjfFD_wHA*%1diz46|v_4_&wne=A6@Wlt) zw{O##7ymfgbNrQBdE`A#vR?}VseN)xpJ3DIBByK_G zqN)$?!X-60t)xs6T9(rEG{5N*@60VYlozwG6GLm1sCJ8zA=Vz9ATog9sOa=)1>5>i zNUYlmCFSv3H)hYdHDSc%Y41*`z3^s>yqO<7_hA2rEe6VQ^Z&DS%Z{m2R@)-^BR-(} z2Jez-U(a6t z9D27tR*1+1M;F#9TQ>3_t_v#hhU_Kp;1`J?j65+j&Pmh6CgRhcWTX| za>{?bn{-Fb=dN`*%<2h`twDn#F1GoA>qgn0iRd#pEc(|H(D9{;2!V7klq!yHA2lrf z21d_=xieFXbCXtvIi_4VG_NTau9Yn>W^J)KL@b#N(TN~bF9xE>|0Rtat}9`?PY0)^ zcAIo(@tbe7nB4!we;0cFsYEl@iKvV4$k!Yd8!uLQ6N0gYmFcFVpX6w)k_QKHnCQ;L%K1#|d zCr2hDiEebcse6y=EtJ$viEX|7a*h@aHM%L)D}_m-k1~Y1Dw%CnR#wq2qoq=YK9FoQ z?Hi8u4%3Z};5Wl8idctM7oiVuN5Cvb2=*c$Qg{NUj#UqeG)NlTM0v(xT044|1L((8 z;6QOp)Zu;Ge86Z@0ba}wQX0S}&z_y{b?4(Kf0|)kU2f^aO{nLFlw2DZ+fQd;_np`<8I7IBE5Eeo{1bK3l z4-u`Tsi}?E~ntcW5iym%09JW6ABl++7Q)d-@3JH*N%E|#ggnpS7pm5Tf< zQ*Z&{jRRE@*nGZa@@}OmO_$T8dEtVQ z{f7;G?<4s{WF`yU!&3J$*Qy8%oUiv5l@C!Dg?@LLpSk)oG)S-FdzfEsjTos0vf!&V zd#Wg<*eO1OFnMbGFk(>_mR1v^y;+zA;k%OJbOZ?3vyOQ2)JZZ&59FqrMlZDp{kP@x z-&Piuy_!jl)-18-QNp`KWocrgTiwzr`nSF~t%Gor3?xxN2=4?@G_Q{NrL*~kfoA}(f`t~2qe;%{@)X=wQ zj_BKGB&*H+Ke%!I(xK0P9CY zS#+XDx;8P-mghS}S55vv-M8yl{R@hIGe zqWRhq4+=9>qBGJ`#VkMx1ssvda?kTS*VL~YQt71^o9)>n@8A4s3G9zc`$F2*+tZ;xsz@DCR1@_!c(U<60tvs#FkK}^A~aZd zukZxWAP$emLLZ$|-oyV|iIQ00-e1@D?7o9P z?!}H>{!k27A3v|pRqtdCF8BR}y|{O+W5!JWe*L|Fsi0SsFr!h;`5&{cqkC=4{)j!i z+QKyN`dQ%I<)2&$^1gkB7exWr=CN1k5A;;pLe(XhEa{~=#LSm25C3fTG~~hXNQIUy z$pb|C3EW3gkpT_-;>6n14%i87;Y^#_EF&ApskYGNn>=c1v*pV#S5%iASgsZwF?U_g zkloFPk_;cfWJEt$&tPK@2BCNi_yli2M9qo^_b#>7kUQ3Ich>VMBxcPqQRik*$^t20-w{%eGKKVbLnAm*fNFI2yk|F#w5+Srj4MSM~3 zJ`l=c7_Kd;Vw(f7uOIEem7W}lO_5WRS$^gwKC*DVt>f+hexHQ}AcOC#!=gGe0=f49 zn%2yg6>N5mdrVW$%QtM-VcQZlf1ho`j%%R`e0=}X(wiO&K<05PQD^Yg)8rf5_`~h1 zUTM*^jqUn`m2E9bkfPv1oeQN zXm5-9QG`@YQzAuK6aGEz`K^d;t{q8QL$q9y)33KHiGWK~`zUW=6G<3R4wMrocl*zz 
zNrxx#gD=&o{qjq7>Nd7b?fll*y%Q&PN_x3*?JQYo4WhO;SHs8rXh-MQJ3KBdB;F)Gx*lX+10m!3!ERz|WzjHzXG_!gLD560MWN z=#3O9xk@r+HkAgG{`1TWy{cDurrzWU-QCajOpdAkobA@o*%1wb8`g0QSrAb#?B$xU z0&l1VN)7NB?G=apK&TlKq07G%G|ArD3c$)Gks$%<09QMVYA3eDb<5o^^FMYCJ9RVD zR?M%kBz}c#&D(qk`>gn&sOm#bl%z(1lHycimD)-p#nzodHvgnX{5tKM z37hbceaAg$q%Yb?;=%<)Z@6IVrYu9#Hsr!4=UOk&N?fym+ zH%=?pO_5m94)rE)4hdDLvq^+(WwAgABncuGY#CAJ%`u|WLLm!Krv|U^r)buDkw>l+Sp~C z%e(lcJFGbKuS@D(7Qp{v0a(YgdUEuw>aWTS487A#U?kO*AQyscIyFpW z@Ss)6Gy+JTVIVONvRl9+E?WX!N#`27bF|+ao~Oeqr|Ylw4F0H!wS^5j)K|}j4jm7A z+G!0!e`X_(Q5#Xa4H1>F*1|Lz{zge^1+J0Fl?6PacT%nGZJe*XBev=AketLIQ#Be_ zqbDHL)~_c_;nUYMXFW7{Ksu+O!=y?alV|UiUwX2a*_BuL0NV3zy^7se6=?wcy(fq< z6yVVDmqr~>g`tCL8dbo_P2d$V6NjMxhE?<`Ak>-4m=YQMc zh7w@D#<`L$Zmh0ux{~KDlx?iuV*V(*WRsiy%x|fz?;>>N2-V4!XHEZ%f3&+~kDHzR z)a5{9A0cCp8)$Z5RRLD*|L7>9jF*^Tpu`ECl=xbb*hL70qKOUcScS(3T$01~%HfyQ zxrNx`i@F>X;srHM(8~ec_L@#HfwO;5%tU@-S|N;Dk_~3owC4k&&LaqP3f=szHQ#MWH4+T@&SiZMz zp4!IXN+vbIDrxp0NNVseD>Tv~78bzrtV@BeBV=M3sn{(PFHHWOzodi~F?NT?D3`pI z*%A2?vT=*$mU6Qt8@%XqR%pLn+ZfzA5`LmvdQ%I~c@~}WWs%-1aDwLt30>kqdC}t7QW01(G(_ZSxNk_Zvs42j| zPD@i7Z)R-C;^M6z74oxF#?1fVBk#G7v;%p{u6*slarJLy-jj73p3GJE?^jvUuPg4i zzznoE{_t5;!qsyJ51vzt{#MVENANmUN}Nr1K*?jX{oyGR*7_!h6Qr97+f)9mm6dh*@KU-^v+Th{ky$yq-CiE&f>@hx}NSn1hHBa}YGF5Du@C;I~9Z_n0{A=tpA?dRalyeFN?_jMK!(*&St15|oTdO8n3dr^T0F| z(l9dy( zUS*q?>C(E%-n0&>9c#Yax=hX0)26dVne3%3K)#gs64jY7%$^0Ax=RJm8C0<(Rs_2n z)fthGC9BDtg8jghrlv7)zposFei~g;Aqme0jz4>BAIlj!^*__&QGm%&9zfa@u>&n-wy8gh{m7H%_iHKV$X+xr+CTWlUWt%TxJr{vLaUrCen7 zS!;fjU#yY-?Qg$*dpYsDC%=9Rx|}F}D7OMGg8ns=W;iQmkDheD(DIZ`aJksz^hUK4 zS<@Deq0+B6Y!tLAoFyo+#I03|AE?hG-YX})ra6rasII;Zk3i^h;W&_wix|nwoksVU zpa#^osmu)^P<><2$9hsDAyI)VObsrSHM8{|AIJ7Y)O07ytDBP2rsAL6I>C{$kSM;Z9`}x^g@}eNX+>eh_c7Y>mqF+s^l?3UKJkdJL z)nQSqg9*%zspeNpbn^LGI@GjE`lppFHAJn7zuuory?2ndI8p^9b!t?!=mtlR# zO1_+LBr94OHM7^kP3+ZKnTO6SVWE>_+YD?zKM&0_srRZOYfuBQrfppcv^u0i^51Fy=jYUlu*)IWWN!yga z$WNFndr#SYVxX|-XtDhmV1tcUe72ovBe%W$Fc8~4pBR-p^5V?)d*);=o%PldwKe}Q 
zZ~QC&VY2s;a(BbMsYPd(pEz;x>l@e#mN;jgatBbyW3L`b^!k>xu2=vzwtoRYNNW&S zCZ6|{w>ZUu%?;ZT>9iT@nHU9weB@@PrOEX_{C@xJ;WO8=MzedjmHV{pom8i3r+bga zT~}LwcHqq!U%Vg7i~1x~?Af;Ajs_jmUT9jqdUy(BSF2?e&h>c(lfV%!S1y_YTk&+TB}KL@-{;Mu$f zgy2)dk{F7MMz+mxVnW8;l3_3{f$A#BkS0=xkMcQRIH-D^YOf5Q@)qOUlniC7chIbI z(^Hl&lb2K7bur-h3vke$r6DGZW+Aq~mjRR!Y?z%6+}Y(Mr!qlFj&eCADk8gBi;t)6 zwv9b8k{93n=&X#{hzb1ilSALLxZn7X{4vk}`nrtgUdd8t9&dXEFq8$?y`hEb9p*^A zmV@0YqiZb@Ya0+)Xjxh;FQ6*8+1rOZ2Li{I*1b`gt&AWu4B8gG=FxiBDwGx`4BX*x z7N}kkDG$Z-i+-N=PQT3o2e;1~IsMLbew!EOvdP zVbGL?k5>M{uSfD^xqsB{t-Ef#Msn1HSGBz))`YHjUpgGH>6d?#!3i|4UA(2h%{XYJ1NpsD(pF7oA}XKl$rm^DdvT_^7bt-Y^}?Dr~San z-vj!+ydaW4$38B{(lA2#Umo(&-LeW2ZDK!rds#s4mbz)>MJ_`Nu`Nlj{1^Or>RDWpIvA5KF@;1}7~?JpoMWgXf`kvweKYKKs@K&&gh~ce(=`1-8OIo9(UMs28REXl4x#Fm|*g-ga?G+9Yo&jWd zDAYP6SH4qyNayA$m4g$TR_51_^BajTB?ebcY1U;(HO0;f`*bP4%CC)gocFZ+f;^{< zUuK04-AU$KqOM$C=$!;aIDUHnDl(*%d~~twPH50YFj$FMM+(%W6g5AWpc%viQ`Be& zh@v3K?1XAD0b+OX%B0iXQIX`4im>06k`AkmsoOYG3*bfCHAe)=_VO8xj_&!befwW` zf7ob@?F#2=%c3K#)Sg`ijg*hbBL{ctschbRia+2NA3R{SS;TQ|wfC>xXU^_A|Lu+~ z)Cad^$2X9vYQ=xrvPI^pFFK(0y-i3JSO`&~?V-lZ3sa*-iVej{=zUY>k|^aY~-S@OGEGUw&iJBHh0|Ma6+^r|}?_TgHP_7PCMP zJxC?5?2c7Amt@*y-tsh+`5&{?9eA3`-VOY>pVrIz<5a+#bx>-4UQjDe8mIZ|87hCu zhnh5@vHB8Ug78ur;OW(JDur2T27_d3)Pg2AZ};YbdswbOcRE~gQM7Zu15Ij*EZb4Q zPH!NmhtrgZaGOx;8FZW3Ilt|_%B6ClUH2|&ShaiKl)y^LIM!pqmi6=SyodA3ujfzy zq1wW{$6>^7&6U^7jv+t&A%Enp>CM|PbLu*oWD#oLk9LU&gQq%6W4fmb8)IbTEWIA0 z++r-g#H*&o8wLwIR*J@6RNz$c;9{z)0}ZBW7h+xWW^qVgnfm$!1EY_(1OZ@Pq=k%u zm{IbjJT~|nh8@wr@?Q1U&CgdBu^x*yWzAEbL$lrn<(m(W|ES9AynTTI=KXWg#4!sL zvTO~I|NRu}jFfsY3cWuw(1F;=U7;jtk=9j!CyOcG%nzw;2cOJf4Ee524Qj3x)X<>g2#9P$) zzp6)beCMI(ora6fXgpa3n!u9}9P&o_ye_INzu3Z`wB@VW0OEx$upgwUs1gWY3`@W| z;fpCg-nU48iN-?6YetV8C^Q!4B+RLCXfG2B2qcw~xP-iFoVPI>e3wbs#@hRd@(#{= zEZ(?!ArSS7a`)t^pHxuQ>HRWm>ZC=2d+YKwn1iIJD?}o%AErYLL83iniSeFRSEhO) zRpqe%j5#5$M}N8z!Kz%P`V{~Jb1qbEktxTv;mL6%ns(WC=6K=Hd2HMp!$V?~0mllD z$ftRDWbhEami6OnWMwex_nAEW$uH_#yh9-;ty&(_h^c}P=jaMW;L#whrPIw)jVOVf 
z)?^`iNtzSR2&|tIX+I~_>SY|vgh8aH`5CjBKoHt$eb0BJu5veW4@kdK3%%Z6uI^ly zw~hDxmHotD_?FGsmbZb;_y(=!KRuAMyaVYUp48#-X5i`U^sik}F-aLcGh#4oMpfx8 zO%eW)c4pKQJ+i#B!7XcTzFoJYT6Oi0+6K;TOz(t&SoM&P_3JxlFBd}A@#33 z?_XwWv1OO z;iI6)hU*Z`qV(-+9Bw>ro}M=2#FO8WvD=nDza}J2SaY{BK4u$puFB#Mx4LsH?BEYp ztzxbn6>_f~{o>~Fa=8_bU%!6BR*7ZtKeuh?zps){p3GuFtThYDy2RIhfAP|H%7CKP zKc74M6XAS6f&zNFNg#FwH}=@DaDl~o82+@yVAx9y2D&<2ar?<&tPXpx@Vd`n{D#e9 zu&D$djUlOLaj!7!V){Qm^F-Xjps&G#)R-cSOOjau18d+m5i`*imgI$}yVSG!gZ94p zSQyTCkDVfJle<-lzVQ{i%Ijv$PQw$n8I+7<2Xwm4Bn@dOPA_UCc-d*0*EeJBui6E~!L^UaRIcpHjIe(Ik2|8aXG{QBqZsbdSnPO=3K zK@FDy%kr>okMXn@VZsTV?|A^jqtalUO z*GxKqtmOa6l+#l*#Dkv5T?Nu~7u6|uW3NA8D(ByLukrpk>#=C#>IJah`@TDU>Sx7P z#=FxnmiDb$jHR$67P692p#>Ty5tT?%Bj5(h zf-rPyExnYuBG?Sg@HENo9980sT+P!x5v6lpp7O>&d=W2g@d3=g>+_)WCu#+YDI-rX zbpZW~u`gA2|L;)t`6q<`gpRm$IV|%-5zQ^rf=tnzNah$wG$S%(UHHof<;jOW?aznq)7qilXOEAs=M$+dV9_wKyU@04ek z4lHFMzi#-2MXcNR9aDDj^B*t$m|xgd_&w3(17sX-V)Zm(uvNnYNr)@r$Ys~*V!?vN z2@~ql;44F2YM}ulU4ohB9-%-(F%AdXg!TwU-E48_M!aZAp;R}cFYylE7*5SaXhOvQ z)xZKdXRsA%`r~JxdI+5TCJiiX=Z{zVUCGNUP?oTOe}59(CRXhX)j7R=FR}E0eH@&O z-6bRyQIpUbeKe=8HJnbUAst5+MK1KKftHeTqANg@Xt8MqEA`5-)1cUa0tp#Y^oxEd zXbU&1>=L`&P%;c3#M_m3@s#MR7ujq4zs&UqyIl0kw&koGf3R+wobLTt9y**=D)|0M zTjlZ0O-ydE0<^`VWs}1--LIPM)`ITiNCNGd69WJ8#owrHDWH%C-8pS#QSNR-d|C~EJn;GPNzrXkMM>E@ZZ#nnW=bU47F0o)Oj2+UVnB0^oIANkLMxmqVx~M%- zpwOZy&}B#z4sc3TLwY_VDl3YQH2XLIa~ob0?drW_W%y5rocLrwLSky1D>-2e+j8}G z*UstVuD>S=Sk2L+ei5HQF8u9P>*XwIH6bo)R*yH=vg;zhQ=5&;SPeUP)k;9qUch{< zm`}rN?pLKBkNH$y5JCBTx3ZzIC%yvo@uYZ1T`E^EoNPoL=?ndk8ac^FG!zl*&k zLvz~BXNZ^=_7K%%70*xjJ#_y)in&KX5~>(&gzXKJ$S}qxS(EX=;wJU43dz6!!#+Gt z_F)lS3`=o@WwQU9rKtRr?a3CGeq__d#xGb@mS-v}`-RxRrvJ!36;Aua>nVHQS-B?$E4PE6UClGrd2q;0voROH7$VY09MB+PUNRQ^KNV%zizDSPrFX)TkdL$P;jx=4!fo~KyL#;m; zkNno?e(BQ>-N`%lap#wges|*VpNAF<8k{|Bl;_-0rSywk`Zry$Z&OZ-iIo~1dGqaq ztJ{u9Z_};qYCFvueLPf#-3`ze3O7=q>W7!p8^r&y11>DeG!2K8k=9(XYj z$xaQ?m)Ypi9D>fw`_={Sp?=Lp)T$XzV7uvF3VkFaFe?yZ;&Iq!X)dWYj|f4vqTfC2 zLs1j4x@znbGwoY3)W*mkKiL0-p;nnk1S7}a;PU7d2$@0k^PNDW7jJ;^?S9h67n+=v 
zkO6MlybtVJM$FyfO^;Yjk@CXs%3I4Jd;5xB_CY|dMMHC}VS7z;K2?)g4`cv*2Dny( z6nR|FGs{j$_3}|5m>i`)f(;I5@?=r$+N5*1s}#6nsLByMxe}!c83PAb=}-gw0WQVU z5{Z53t>+RYyh&!Z_q}|uVg8uD~veY6;@Jxbds_E>3i0+bXc=ze3*sGQ9Bj&=cB$Bc+wl(9h&d+O>ZnXA7Ua--I@(OCEgVfrW`12j9#WL2+{GP?L)N3!T_}51W_& z;D|AGWs}iE;|+1#F$}*QVtdiAuvk|5KmYuH@-GBF&aKc&A3|>FEf2tI^bIgJ0Y48- zDh9myIPU&ezk;z2#?=3R`4x19k}L(oE{|akSlL6L-pCiV#c|vZ8#pqfFPO|ceq_VO zQwpj#h(SYobRETYz1g0H@s@z*OkM?t?p1Ke+-h8n7?&KXF>Z^BWtix4&kd2N*@6tO zf*A_{uY${BCZMVU=?~at^4280cUzVY^ky`=n6$ARb;U0Tx@JGx(?#kSKzquFoAGflU7|fOhFINss z?bKsOKXLKzSOCht*xG;Ip$)l9@<8!x;5Vp&S%zbt>$M>1Hz9wHfh?1bCWCS;9M6vk zC2mn19SxO9GRXftZo7zrw)@uE_Si_yB3qGsqOiqm4e|Veo;E7xtBf?06aoFsFk6@( zmKrB4p4=ujKmsL9J(+|WrPIXu&}tw&HG&16|Cj}rWGDu3N&M{+UXO?6Z)MS&x6MaM zfQ+laEqwKDJt_te`k8>y>AkY=vzuq~Zc-01L>ZK`phUtN_tC=jT8O~Y7?fz?N){c> zufLYo{l29wT}d>jBDpjaI8$KQ(AW}~tOZv`@w*7l=8GSS-eazT88`E94(-B{#NPuZ z(!pVy(LnEH(z?OR_A{}sZEwZ~^aC#Dd(_pT9*h-juWLa*Tx0BGEI$jDNs27UY}t21 zOF{DuErc#HWvMZ%J0=CmGiJ7~@v^cW1q8X7D`1n%utIoYbyy+fcU+i}&kt`wG3py8 z25NJ~^FHD$+0$`H?lZMR60(~Q%B0SYZ@uMVF{(!h^mi=0;Y<2g;>M4pHjk<&cMqy{ zLSo`{{v%K4I?L&_pyv$5*>W@$c{H_h`k^a_blh^W<@m^b$ID$TNAy~5PdS{>i{)GcIip+_-mD!j2j5?~OLpIV;Y0XTeuMdw0>_y!MxT~Kk~rE5naz+oov9r`T!2DU=`9CIg)`$XFDs)*;YQ;t*7T(b5HB`L97gTl`dUgx&E%2^zidZbLUJ}6CQp( zW%isYYDHST*U)QXH|7(ASvXAfk1Quz%3OosEtyl6Sr`Xjb418ln2&X|e-;E4)U5^S z+BN1-C)B?C{M%=`^!#w^3Fcwl+NWpa_v_xJA6z`%WcQh6%ieYK8{UNeW5y5Q*SyIC z#*gWbLe4f`bOZEU=!itTKALJcNvtMtMsCH&o8%V!%V!-LEZGs<>t(5foKRN4> z9qtDB89_Ufx1AI)(~*^=44&jd>uIBKqMsY_oE^&Kl)hVX*>P>V6f`_&n3)AsTw3_#&oK+PJRWJzm_Y~KSk`0%To zXn+QnYPTOEOjtYI`wB$>nQaAX5p96vtzA#EwVbTQ->-Gqe1hCnK>3)w@#CW=34AqX+;O9^R6Z_WtG!pj6+ z2ndni1GZ)k=|X;)Y!!<2nK-x>rT;c!KN53^MI^MZ-ZWkp%Y>7aQky61E7<;NJ`^NdE~9*r`FKElX~FUZkOPf10X5iRkfHjzGH1t;wYjHx&`z$N_O4?~ z&$0ueCH+Z|L08@a;|jsJ5;4M(@IIKwW$fPn%eYY60U9I5W%7>FxI!L3u4E_wd5mZB zxT7q89XonVlw~Q?%9LSM#1;CJdhSV9ze^X4?i{54Us$y;XgO2#Rg(iUR?ULmd@SFS zr_ZoYtYR~QOVW`b7{a}np>p6eFrb0ykCbmBhC-_fxQJX~L_x^*h*#KL_Bu5&?;$5DygeaG-n&w5ZZF`+rT0CP))YcCxYXm?^YF6XkAAxCE!?Ieo8A 
z@(Hj;d^^S}i>nX_ulx241-cv!v1b*4LK?5d=m=wY_kw-AU$OvW11+N8aOcQvGGZer zwN{=cgql-kd^o~Wmq6ew@WQK_?nhNlHpiAcSf%h23!r+#F_yt&CS2m%Doh zXw}IpXGWY1n!Pq#J)zwBv#J=cYTk7&7VSN(RQ>p>$Y$dgXY&Ma4j&siX@Qu`re6J+ z&+<-W-;)jwgpi$bGs{5-AETAmb#TOH!+mqLIIoM-%Aj2s5Dp7{YURTv&cD3WO7T6; z0t+9DBC0g|Q4yP@o}ic!GGlbdnpxd=98Kmc!MpSyUkCtwjv!Ou8WwU?iJ(xdmnis_;u_(kC0o=#_t{E9SR)5 zWIn??(ZBtP-W7aI6m7p!6&uf~rn0j>_B|e6^IR=P$6J8L6Mg$`agthsC{l+rmcp_~ z7LSTys%s@mO4k8exR`t)Zd6@D5OiEtkA!$EjR~t)00#-1jZ=&&c>J?9 zuZs^^H6$UtHY$6L_~(mS3$kNdPF%2gW35^1#IY5#Si{3P>&3_iYt*X4r{!MN2E6q| zmEGB=zEy?|Y7#OfZCjs-(-~Vffd$xemCe3Vdc-ka#2Srt)R1emPJ2>cBMd$kYlM72 z^BNfvz)u+eS|geAQyGBh$`tCVe6cclFe>kS4 zCGffSe8rA=Eyh)9vS-;Iec9@4>y2gOHJ)s~QOQ**7|T{%dnyzXGZtOLRGrg;Di^)ejFGI3G}WC*UK#{aEUYNWaPvR>M?X5ExMFcccP(j zM_-I4N{QYRP0DpNDc8}YTt_#g=PyRz!t)lvW6fcqB{A6~h;m6hy5BRKW{2$+S6lY) zNJ^p#t%ge$^;wnj-gQB5F}^|En6fd1zgl{eEYxavWm6wMzv@svpRj*v4&dkL8xH;S zbNjoP^9vd`#ml8+HFjD$w2TM-2{VT*H3Nxhs*VD7fEqYZ1EQSJ2%smY^5^0cSU~Em z0Z+0*9l}|_#%8~!G|U;#b~fnnZ~_D%MuOJiYDpkELTMx>47%iJ#%fzUPewMe z#_Y1fH_op~g^?o(Lzq*qz#_-Ou1A$!(|Xqn2@ydRVjH-`l?7t@QP!YuUmp8MnPmYr zo+#W0sl(y_9Hl;R)Pe??jA|YB%2kM2!kT>SIgq{<;<3Ovz_;%zusHLeLLnE;Bsg@- z(q+@jRw-#No9q&8L&pf73?0M4Wfdj(aBG)NQy&QNwdY&$J7dAOJzp{9_=*LdrJLSb z;#rh~`hTB`HxgdULU(7D(2G@KV`ImTPZW#AHRl&BFrjzfSn^SPkMW&I(ab$SF=na@03_6I!M?%Zcb}>J*@Fcef8e+;> zNerf(DNh4cP|iM0QC3<>OYQct$CH2U^8=oJ*Lbr&V@LP%q>miY$HS8^v#J#{GvdV6 z&s|r=)e1v~#&ZyQI$qn`T;cM3pXKJ--xidXi)vHJQj38Io$?Q>mGBf%P ztky33P^~f}rezJU-2C`p(Wr^Crdxgcp5H$8p85E` zYJn|U(yBw9Y=BCkE_ZX^s!R3LIJ*YpAk;2a9SIXy^}tdR7YsP7$%8U zrjlH5s3G`*ItA`JDefl<+)t$BRX45i6E1gZfjc!NufFNYIxhEf1@7lkFfMm<^V%EE zMeEXIVPyty8U(>I+|Pi%X+M|XJeJS?;KOFeqLw4-|4sV8cb z896O0qe{zz!$jl8%Gz%A)#tCjBW|7i?9Em!3l6iIC$Hzuo-A%onlpaDPrnQpGkXe) zpFEqL&5C=uWCpE!>2~GCtTqh?%5~?u{}s`$IQTneXigogidb&4Z@n#y+TwbRgNYDl z(7)mGASZ&egiN?Z*vaJJ13RF^z2pLSathirk)Bvlb|=znT~#Jc9Pl|%v6Y1VH0!^U zm==$22{`hPch(j*QK~bsf7^d|+I~M|$doC>y`<+B;vxq2((9T-x0m2ZNbt?y5`4Ef zZDnZzgAxs=E#?pZKT37WLk%CN*)a&l4Q?*yiHv`DQc7N&X$fGY!E#FQFTsEG@G{>5 
z{0C2O;Zmi#BKB_oZysM(a>$Tr(?~{+5i`^y@RF8A<&QE(rE*>EmwRe#u-~f$K8S)e z*j)3>;M+CjAYl_>$5VL{!iXEbPAP*@mGI+N#l3~hw*DU$$4~P88`ghtdd*}pgAFau zIu+f`V{z-my)V}85``b%Jue=r7-L_NEhGE?X^h4u{GVgA#=tN}z1Rz3D-#H+B$3il zseGd+@8fY-=I#A$&!T=aRxi&U2B$)13`@F}u;TvQFSqrZ|JnZ7ZP#TM?`Y^4i|x-s z`i0rt!TQ1(YAn{l?o3n?!V>G)zfZ6hDt| z#lnz$0Eo*;LBg8Paxpd|Yud=FPh`v)+hFM6lP@?Th7PY3oLM@h9-msSeJJV$_qRui z4vtrVl`bXg5!-=iBccWmjBI;uJez--BuwtiP=dQ@io1P^yH^T{O;R}w zk7Hh-shnO@Ql#8XU3o8>o`ipwKxcja|8J&!}$OWLQsTzLab&qD>M>&k0b{0s&w zd#3s52MN5oCzjcK?;pM4@#{jR!P5$!DM9qRC(yV{!Ikj0cCQcaE6p* z6pChb>=B7LLuqzaCo#&-oc82IC0Risf~YX3B2r3D?A5GZDO`AkAl6!Jc{nCW>}6e* z)tohYUR*EylZz8gSyHvoWsT1$y+W5YIn^K-wcL8E8-tPGv0j9hnwT`Qh{ zuW(`Lil*=JZ#Zk#RD4qSH5Z3pVAZHcZk||W-|H+3se#BDX14)FUYanc&821)9VK2s zQ}8?6f^ML6G(NRjtWx*GHcGPnrhm$|q38~MN_p*(PZ3X(pYq4%M#$LQxW~liq#9(b zq13RA2Y#^x726V_D*k|1ms=vmF0_hv$${cUce5*~{dfJXyHW2+l$7ZUf(> z{K#NxdY~toO#Cp~_z3K4bRG7o={^LS^=G*}*>acQ+ zyJfH8-qRH(z&hZz`KY6o0E<2hG(Ao$uUChH-`D8AYQeKulm{tJ4altl3(&aCA=Uz2 z6zkW5U?IPVxR@|7`qxQ?J0}Q3D2~lU}e9`;*|b;SAUIck}ka0xX8S zA?wJ^ZGzHbkO}B$MZy16H9_$rcKH4`U}`n7kA*Z#@xzrZUJ$=9 zhwH*by7*$>*D6g!U_QI&(Gl0I0gXCO+)^ils;F8-37IeEPdT=jYknu@Bb781y?!(# z5z?qlmOmM!E=#lm^Fk3&6z%cVw4o?WJXLoG(uFnn>l^;YV)p)r`(>?nks>aN-_Z5* z_R@DRT=>}A8zZFZo!=_Q;2Vgfs(})@W&?sj@(qigX*k?rADR~e9WrFf2*wI!%p6L^ zSWUW_Trg;1uLeSW);1@9$(48_aLZ(tDpeQ>xAoCEr*yg-$KS%={B%JK)^B!%z`B5U(3jZQ z!|XrOnBLO#$Ur|SK@3CiZ|RgSs$(CoJ&G8R8s!{X|#T~j;=$a#_2jLV@fqn z>7K8`DUurKiHu+*ubA8Vu|VA=RRA^Zank@##x%N$x7oO##7{Ms^~=xix2!4yG{P&q z@39Zwc}H)^_{k^iJgxcji2BXLng<&lGA-x&@yb8V!fr=WFP*a`KkbAXmZ&PWg$AA;^kdVTiK8GBeEru~+lakh}q? 
zM#-lsiadzlRG#rpKjE#2z}vHYWbT9SsXr;kB008w5JnpW{I?v49F?)~a#Y5H$BznD zwLUNuH$m`&U8JT)4H@>~BD=-l*A8Kn=fn2U{UW@Fo`6fA?$KQKWw0y;49WjCrB>{B z{)Ct>Gk|zM_Q{IEo_ZD#odLJF3O>-i#MU{Wp^zhei)!LaD{FptVn!NP+VA z`g^RR5`Jk#jmeXatba>Sh~hILP?9!%S#C+(@+nKUiV8-C6t|5i`o_KyzK6=T+Q71x zsZ*EO39^T)n0+sX5Qv4lDb{%4*E*!Z2&AM$Ktr8{bJe`^&>hUKS5Qv%Vkxdg@#>^> zB~_Pv3|Mbd<8ODYD=)S9y)Z&#b-qfzE(Cg3HBd-({5}NTF&!z}MZhnu*JF*aZ@jX1 z;Vw;lvu@1g8EovbJI9;VoiJnI(Xj`<%jiFFf_KXJG3f&*^yxjZd<&=!O-}8~V-+`T z7T31i5m$nGvxpsEukcU+_L%Y1^4qlyo|zTwqdAevl?C1DnX0d zs;M=eq7{S|ZA7&#r&7W=44NojLGV)}#EpfN$PFwc{H2coY)!f~9l_+{#nB?elj(=C zf~Kg1Rx!B}Jqsw8Y0-^^l*?9Hx~FA!dYzBF@R(fl_4_NTp-An48{H^3h7W(Rm zpYDH{{`Hy&w*Ax5qw>dOuU#+^y!dJG+yqAQ#MfJ0&A#$l9?11l; z-g|IrxLdK*Ce<8)RScaf^9A0)Vcd}zpTno0)A%gl5R0bnKSm*XV}OtpOBrg6 z)u({Q`^E&U6GjO;MIWkiEx%d&7+ z^gm{s0}V7EYfX_&yD73M4P}E#8pDwkVSuzz`$ED~?3RwbR53v&aQYxvl(jkMgy+J& zKhPLv&ZZ-%spNet?dmP@B>NzDRvqt);5`kCezYHjFQWqDegm{99Z`dh=#_lj+Y&i2 z#-hdQ>5s7~W}!mch@LC(LV$&soU}xrrleEw4%l3POi}uK6!lHUL#nhH2|gUI1W#*RVF#)r~S^R?vZ_ip>l+Avg#5kBh|u z1d$bV0J0}jE0smsBK($fay;vM^5jg;zVhA!c;fzdeDPv__N=%Al3T<_cxOk7%MV~X zf0KLi-1*ClILAs9zNMPbk;uIW@{QQ1wOOM1mc!}ifZmt*R3$vVBnc4@FF5o1>Oh{K71iAb#&2DJYOAt!h=#8{h>dvOoxAv z{2Q%Qf%iw)w)_1X|Kgbz*O~MH8eS*Ac!CTsr(oHsZi{)5@44#F)Zoc+zdXL1B z+OK#;TSu3+bSa{b?4e5vT^e#WlGI1DssP=2$hn$`fb<}%W^bNrRFr?RFhV># za~sqO32hMGq&c#T^dba$k6fpn4eZX7sWO3XEv~X3mNX%)MbO0Sk|xM^Ojr`1wFsZ_ zH2M5?vC45@zW*tmR_v$c^K0}=Ht_hZsXP_GKP zAMyuh{Qbvm1EB|3#~PHg4c1CZU$V(WHRj?^E5ojtJc7hOCl&CO{w4=s|;ac$h9BDpI^+nKK8`wNpm)BS&PE4 zYo~~q;M-^3{eIA~?2#*%j9;@b2UI>tj8Q9Nx1v!IsHCq_y03JfVQ2sEgDzug9*aTC z>>=oxj~O(fDV0***-AeqMt=OgxO;QPm5KRlr!06&oLdif##j;R`ttO9xT5_*U395TYWltE494*ysndX;QR4ObZCI~(+}^bnszU1s-AxitH;Rt zwP-aZ@OQso!|UdV zbt5FM28MbW!zJa<97i`W-aw=*&vO$NEC(1;@v0AS3xPGqDLbyppPlmHk^2JodWnB4cPQwIlo zc+WO-a#XeP-ttvApKxu?A8m$SKk*Ge`|^g@m%TB2YkNCNjG#&0bl&=5bkzu6g7Vk7qP!&=<#Hw{m z#RUYfhWuLi^L2as#-nFp%K1?>6!q`3;%Lb0WB7!%eA4uXYuTl9-={Yfh3(pQ;~#ns zU+sK&npa#2V67XCUo7>ir;5H-zsGq?MlOAbX^ztMVn|v8B598HXwG1Az-UpGr5`3L 
z#R9#8C&dKj(-Om}tR3>K9lqIM7eTjx#*qW+C!P7KIV-lzn)dVuzbTp1Us$u8z0$H{kLAkN z+%+w0X{1NIEUqYj0Y4CL>!rm>P2S&y%Cd>kpx%1ma@Q7)hR zs&6xKZ~L;|?=@;ZYIv=ki>5BXJSK>5>+7Z^nTxSe#q)^wIr=Qb2)S)C z{S9J#WFFWJYzmPeb<=VpW5qI$gm>8WAN~?Qu;kB&b~<*HtxRt{s6)_zRQ?$|l*2b@ z%asA`XKZMZcK*d>z0W_}eDsv~nXm4ny?DOpCub&3Q-ZCZW;1nlu_XG&5x~q~Bu2oL zYz*_6dPGT&vj}djY;c^UHKa#zF4NqpYXRC4ks|8jAP(+yqN19bETYXtq?Mjs+Ggjd zykS1Lhw{U_PwqYV@0!vNcl8?m!I2Y}iEZ2wpOxnM`!KtPK#Z3`!&3Z}G+% zooS?0@H@=mb~DcoF$fdKfZ=FXt+mJ)a)Ur%VRrr;{^H4zK%lbJNy*An;;<==e^1x8 zLnjemjI5#Xp~uF*y_Y?j$RFQp!oi)|g?4$9SAI9)P#*2s_M+R)5!f?y^VY&+=%DKy z(4sF|8rT?)aydnRT`6QUn7mLL3UuPD&@71%g5^`RU&}-9?pdBJ6S~CW;l7OWS>?$x zDSr_++B$kiTe=j{JND2e1($sx&>oi0LycJ}HPrXt$PD}Me$HN(Hq})4Bx+V*QNG(6MhuGs|OEb6~;pQrcCRKwia51 zubK(byM?V9x(-Fw%_bBS9#dw5R?Zh@v!gzFa;O9lO0+#e*x~u`4>_1~&s*Z&n|v87 zvH8a9^=EC|btT!hh*hl2Zsyv|c@D;OGUfkQQ z+w610F!FvyKcRk18=ya%XD*Qu49DkT~`H_#z# z8|eZx0sd02t~^{T&(u@9Z;0QP4dfCQ%HZ>aWDYp%i6-`y+-l^He4PGQkD)LA^y;;=(hA( z&?qAx9i<_Z{L<1;45u55~A0{=6bkY87;Os#LX_pNCn3eg6G6rMHn?NUb1B%0eBM zRuHD-M$MH()jSdKgMmn4KU3NkrXi&cRpxah#6fvaq-3^ANY?VBPocKU{*|orMfa-r zPc9H^#6zGS!^h8JiOjL|ulXlWF4_9d?oFposmNIqt9MY7KqL=m{3@11m&(rMB<31u{TDay46M8+@`c^p{dJQ zlL+xHd%4@Bj`e#Ure96uu{;R1@g4A5Kko4+K2KesRJ1i?d#>4D{GbuN=M6s3eolXG zhOK}9Mr4@;i6P1cj8}ob3|6F_E7f!ofqNky!NsADgI0V5c&*KX2lr48^>&0c&ssWrbpQA8JvG!w_JV^fSL^pk zUQgd+3zX?v1Yiw=riW;b!?9ve59J{6g^|s(7cb84dluhQNqo!d+xFvoV*TTxBwBlM z=Vv${P2UpkSTLxY;^`y4ZIQKPY~Owoz0nq<86Zaklr4h3a%(UFxfjqe(U+>n;MP64 z!?tvBR`W*h^nRVzbD;VZKa90VVlx8ZZ)7vrb8;^lsF8dYzAcH(EJe@HWDO-nR1zQY zzP7(H)==A1S_v6xpiCG$tUy%E`q!AruZ^x0(iZoxLbxMJUk+m;pJO`ty~Rh(=dAF& zHT0uK@^;82tPLVYY9&x?NvbUPFLPOHNd_l*JnREdD6<&Es+g;3lDtPGCjh z-!zB0Jc?ITF5m=5X(fUw5yJ-Dk-LP+IME@>R0t4i@7#>;-9`?7wMT}czLGhtN8&5P zGddHcEGzm;NwHl5?|j|Z!g%5e+nP;AOq5)h$4rw2}0zMr9K15jW=WH+8j%fVl z_QYMe*M7jod7Y8fqXO+z7p3DRiEOa@$B_K%4`Wl;R59aVc7*($ovm zT`5INDl1c&flx-?ay7O1T*5(7)AX>K%l&kLyQa(C2w&jJd%^S)^shF>4{LFG-oCA1$t(&b<;X=&CL$b9cFQB5{P4Y|)Y&>cw{_c`>D#tuuW0*XPWBFO 
z(AMcQwr-x0y@L%J$j=Vk+qq@)POr3hp$ogvxdq*8{>sB9om;-}N~f01JF|Y%w@;X` zZQcvhw~rscb)GV5`i>p5o4>YW>%7A9P1KQ13hT7(*QaC4wtf3_XxX-3|Ce#EZ+re@ zn||pXTeRuluRZPcP}>R~r|idmxonUKz_Qxq{t$v6d75d6^u#c}KwM+V3wRRfc19SR ziO+Sh+TbEtQ(I3)vCh;gzAe3IQ}$>Q2V#)VM!i%DT(5?ja?;gj`k!TQRAsPShh_x-{CZFqTSkj6^931aq>6_j8!<#l9%|^(I6Z#8vjH-kKeQBBXZtB zD`Co1wOBOLw`DkZWV|oZ2T+&n2oF&2!oVMwD0aAFF4*t5P*@q*OR8k?Af_c6i0@Dq z46nY!zH`!CaYmG6-+6|4KUCr{nr`5I1JMzpifyG9Z_-UHv}_oPS{1$fXBBHEhZVC% zAvqanBvP*;9ox7@KpRXs5E2m^krJWw$SYl(@Ihyx0`&{Zi!(*>kd|1f04D**4f`4& z74D380;&K-H!T^N@OeZ4Vk=h%E2kKp@+nR8PooNg@5melOp}ZHT*k)F!iG2g}qt*-k;VxIbgqt-9ippvV){c73ZqX9-%)SH{ zB#pj=7M)ivp&`#KnQeYhA;~j;Fb$pvvz&$4H8t3U6PqY5q(F-gm-=#iiaAUMHwKYe zg%r||O)w%Xl&QaYQd%fFxjQ9T6g5H!pMcOYcq0W{?c#jx#tF4pi)NFjE(*VW_MC@J zIRA6_qWtp@(@)Hs_xg+r%1&?Z#*IrY4_`i)uRC~@d(rmm!~t}ud?1!A$jM#E!6&vA z-3f4Eg_3|jBN_LK+ELzu>g*H|Cz?x!|GNexP(7Q_p03}3_}kMmVF=fX1#}-Njks2m z*C*sP)wjYH`^-X@MjEshz$KE!P~a%+jHtQEF-P$=GY}o?3jGUuLV$}%*&(ZmK;Hrl zLlz>#5clCo!F|-&!FwRv@E(j5_d)Hr52=a!keaw(ReswO1zHV#9Qf**1zMW^0N+%* zKzmv~AR5{A90145?1&azM?XMT;R#$ViS8YYdoXIAP>**&%KAoOyzsLZQeP>Nj~+2 zwOSq$A;C6Ji!gafEhkq>HDYlIf%2>+SS13yEhcXpoy<~TX)YX2y2b)`16dFo8=Ddf zSrBKE1<*+W$pKgbhtwL;g=1bKP!b@AeY~tR%KZ9@B7pfv#49g}Y3jbsqx*-CAAe7L z?a=VA1gr4p;Mc>44Sx&toh7ERX}rR_mn*K1fo)rA@|-Em!D3@KCR{i&We#%3=nNjg z87vFmOaeIA5q%%!ZW*lJNDG2#YK|0Xl`6|DA!u@$mDq>_wo0x_ag{JVQxc8NfV9jC z^m+wXg}4edeUsFSFF>}MmKhI6TUFPwcNPB5w?o8y z_PpvH#@}q{-NCx-@;>A(JFFGkC`(DHk@ITK-5HrVHLK_R%?{RjHKz;vwi8iKRhY+w za*VbO($~$RMEF?|B)!RdMRq>Ww{pxh!AC?PCW|cjU{abbzN8?Tmw-toU}8@2>;x8( zz$lJWC%z6ETj8Rdztbr6+>^Pb|Gv(C{@VKsyFX=hg!kx^Jgmmw;&zI%#$NiRF>AGb z-czOcpebxf_qE3YWEaV}qF>Z#%p=COSf7V&=V@7-ed zIBzX}K3@EF^~`BjfeovOl7C#DSJF19wsEGuR~GBpABJ}*QsOyMEE)qy58?=$QUbbJ ziP#bV&6&rnOFHZj1QfOyQIgo=vx2s8qxBy$6n&lZ;(4LSJAM)Wc-bG(ZT$Wp z;Ja-_9_zYlL$MrXI-4}PFfXA(Ku?^)4chbZSYbQ-uJ-0=Z#;w~ne&$8y z+R7Z;wu-_Xa}7IFI0o^vgVdPei?_{rA$#W=8TDHCf4N1QelOPZ!pxMm=GJ)*zg_vK zwAVm8K<_An;gyO)#B6{TrlTyuYYfbUBqRfCVE9)wM=2?mA0Z?NEJ$f{_9W;E%F&}F 
zV~6jl>G9Gmq0PdoOGVCpMZ_(0^cItJ66}dAx=T&xT^AM z=;6sAl4J|T7!NGD(G~GFe?`7HBQ)wH)Qg+r{}jyyXj>jDwm>NvBHZ*4q0(~254HHj zI1rbX6i4(yXDBV+PXy!{(y4$z_~eR!RgN=;o)M|ew@_PefOkwjt9#h9dTsuuo}D`M zU_Co_=qZl8@7?3Mz&jjds~7TTRvkOMsmGf9!yD}BLk9Qi*L%p2J`Y!^!yhg|Ty2p$ zg1E*2B}c6bu2BlPbi?%nBrRNH1^gyE86PqzgI6@LUJRL1oNR$4={1GPCjjIMV0z46 zf{C&7L5APU&7@=wBKrrz8S{k_OEU@!L&qu@9>hT6m7DWx&F`AIcyVS|QF3XwWh~ns zFUGPtVjM3kMzBTR+w472m%aBA#-0o9Y$;+#RN1Sa#`Vfx(7TPAUKW3$GzCaYi!LFP zO`=osLZnYlFMooVO<3_mEkb`2m_uaovxJzyzHn64Ac{pSK0cHbF$U*Cd}xvydGPQX zcVAz8Z^q28XDD9VxRs}NiN!e+dHGSVj$Fgo(nTl@I`7ZL&x%9CCn{AZil11_2=bP6 zDEiC3*S^Y@%+3^j#%JMnne97>At$e-gu@HA_70hEZXzD0jI+S~Wpl6fppU(4t- zY_sn(2=E)9F~a%sGkx%x7WTLBnRr_OUnD;RjJ^Dw9mSt9z+3V&T`)GU{7ix^*7un> z-)CMe{!H=MurGrVjjV~D%H^O1y{bj%9hKq4NC1cSrAHW1DD+LCI2i1HO|i*)I5Osd zJ6MTXX+#vw0!JsU|4BkL0?;V2=;0h&L}5Rho*;z%fio`|DD4J4w$uwAw58W;t6Wcw z&S6d#JN_p6Fy3RfZ|1LCH+SJWwfuTTw0?g6wF&ieB5H^>VtCCX;?vD6;qTxZ%$0k1 zy=%wC``4cd={gu1!uFzS>bE#IPVg5B$P~qI>quuYeVZSr29adS>xMfW)}z@9g6@mM#Gt~aF-CDZrVK$P z)|n4i^4{KcYT3fGycuuoZJE1>zt1l(&h<9IFK*-Wl%EjSQE+zT;N|%!^K6$qQ$b># zCn-M_9#x*>^JFZiAw+U6MjBvyMpJyT93S%Apd0yher>}C`UC4T+0-;%SsFMkp4VVI zk9xma@Rx_xXXVvp$N?FR^j^i54ur}DobK|d1J=McLUTUzEKv&hEv#r8stcZQyC+aq?DwWlkz2B_#6?k%@*2yM#LaRpmv(`!qi)H-uR{6OLrE}xjAj>t=Vt<{8GDLmwd<~@3-4B zd!(mU$uc9Cw41fX{?C?~qmHBnMvhtBZuVv#vJ~;QLwS1-EMm5tGE13l$-%vO9&z%| zpu8JLHYHc>bE5YRPr%!^j&6&s+WT~`n}^WH#4TF!g{UnPVQZ*yU%ow2k39H>#Fm?Z z@Q5Yqfgp$pVGHtA3se@D{m+4g)OCcme=?H?kK{8U$qA)UAVXZ2kd4FEmbLiWwIsc5ur%V zZJ0EY=Rip6wNel%P;RL0@Y#yCQU1?KQbAcF&&Y?dbLAMOxKgr%I{0bVL{OR+%DN+TaiqllO-QLTir4CfPgDy%t*S64T2J7eUMZ_@+l4zMWTgT~%a z)H00pE&M%Puz=NFuz*isCq+Ycl6JOxQBU@Y?N{)@I8zLnKB%VbYoxYQ;oFwqRpTjt z?Dh1Z<~*0I zJqeA+;+)^P^WxFWov~9!j2ra%=e$LJzOr*s_xRH1>ArqBWSsMwc2xMUG5N*!Zr}Fo z+{sSl^<&jM_CDd4hhTIV?AYCho_SE2v|$Q;*2E~u=e$lIr(7vxoR)Q$CV;WJayrHX zDUyr_RbeSqH6B#KgDSM{G|>b+pavK6fiyzsL7Xcu-oywJ3rLrEWM8OX)W3HG$#7rB^1wmqBlWEt zJe0Oh*(tYA-#@uBl@W84gk2kRtc+<@rkMa&ZAOzP$(h7U&m7LlBU1u(!!J}> zR_BX`u%HOV<0t9cQ3~o6&(bJ?#_X|7H>|jZ(lIL)&K07%fW7lO@ 
z5@U82aJ}E_15YE|wTYJQU*uXa$7FDrg5lG&fXx9#aLc5SN8&CBP9-HLSB#KGk$&zd zNmX559CbN;`kDS^4uYWfuJ3WZ>v>DKWf6-l?_{4p?1htV)Fcq9dcAw>P)_a!;>L)z7c;oTKHRx(>mvEjc`UQOA*EIyb97 zD0A|QFAneg!gJ3*+iAipZ|v#5xmS;29bahGzCnl4?PeZ|8UFI*&1c_jZ39p2CPq4c zvA>OYNi^(eF7A>Yla!IL$ zD-dtELW9M%fxJE|ug&DOrM$M2*H`4Vo4odx*FN$(KwgK*Yo@%8lh;Y|nkBC@I_#O7{X;BMw}_bPCAFK~A*aJMSRYias;*8=ye z1@3f^Fv|V9+-)!kSKdpepF^_rSkuFCE;klyqRTzAz&*+3#*$5Sx#twPvs~`^1@3H@ zdjU9{?h-^w0_ZR@DlC*-VZiz0l0ZfHLB`}11G%ChwC+7j1+n8{D5?#?ebCFDhxHg(rS|<(BTm^XpUt>8jHR8j-(j^g3cF7o zkbZ$hdb_a*Z+DnaC5rDK=`prmgC2#ykC6YI6*J^N_Hp`z@vn~QIeGl6VlPi@(Yixi zP_rjivF1(Nv}u}27dVM$wdwIv`);+X7oKDF&yN^!UYvHsvI?WOZyznb-d=cMd;6DF zrR8Y(?|xnV`;}Aes>Fzo3a36OjJQ8lzkw#&-TR62O28;-^TwfKM`hc~dqYEAYPG#; zXn9;qghr{=D13|9ILwzA5I5>20%}@5MyW=AUtKwjclq3XK{n}0f?X3EBk#q++z3?c zNL!O-v9Wnh1Yz_YMSbijU=S#POMhw^<#=J^!speHm`W|XZ+&y|dVYb|tM+u=9^F#T zpFiJ9Rk^Ae-+%6v!Rk7u6DLsXA*Ds4hE! zV2MN>zW`HyuCxJR(o1=5sDF78rVt}9(843AsFkJ!%SzdVj5EECLq#SC(r9GuKB7i6 zRE3*5JcP&do!;%N`mja~TD`DlD+^dTq=TC+8p@*kH+|}v7oQ%vENl3{A#LBl_$ESO z{#A(pN~yLkaHP#)3{1KWAUrhHE`x?D3agri!0GoB5aUTqWxuYu%KDV%U7nschP1VI zMSt-%m^YAiw&t3mck+crX;cD~(%JYK!y!RZ*=72E@DtODJbE6Jsq$(BNf*8*cfq{X zY}LF4xqQvsnd-b5Dr>s&?Op2^ZhCvy8s~|9d*9iQF?B3@psZBaz~YOubuej4MomqB zXo&0GG*RdU7#35o8%BsFCjx$?HL8RM|d6E29znyQt|84%6 zF9hH!RT9{;D{ZI8%osE?z_*;R=Q`Q=wvC$1Si5NGuz5HYy^4NQ zc4O=jhyql%_0vZ$eZI!%{ZhYbfxBvocB?89AYySbCq`;YRf6$p!DXuw`-To+iWI@v zHA{54+>+Tj5cR;hkpej`Qt=6JVtoGlxVyMd$MjL(iy1$RNblI|Qghv=pbq-5 zX)&XFygEGNSf z{nRKY)CHi*dKz<5c7};KjR_mX=|&jR1V-0vb~02ke0b%-W|b4(@89K7-e$^FwbH{I z%H0)2pChrJht;K6&p7y}_1=o)xib9I@<HLdjOqObFK!- zACq?!!$CybL9PuFB9c(jT()=xdUBz5U(Al*zQRTUB&Ad7b>opCtIgRzIfLd44rtBlR zM8+)q1>aD@%Di4qCd+X-;D{nZM z&bONKZQi?yeMTL+a_iL2AA_{uY3Z30=8qcGqzZqIx7;Xh)wsM*yPTKPtyA`h!C6(y zVOMIv68#4Apbp^ewBQigb{dQ>5bWM>ej4*JoQoRMq2tcl|Sk*RI)pa;?Nw!5=oT*2m zBnM*@M#_@Lf+a#0ahKy%j%^P+j!JF&Zn0lc$ZcONOQ9QIjW&>m*^iD1BDRZNF?Y8K zPm~>Al>c~ExuTzxX(FXhn@d>Qg#Idgp%}meoe7E<=XZ^Z;^25oa zKYFGQ&CC*aM(|aAI<)WAt@ZqUdserdli29e%KW{{+xAKC_AdEQ+F&QnWA0@jw*pQ1 
zDw0PUbN9lV4(KuZ)d@56 z|9bbpr+rq5LwxFVw&vA&jb9#=Ib`#P`ES1T_6G5T?!~k5HR@t;ipKw$QyAmaRGWlC zkQgf_XN@cLtQ2K-h%&w=U+iZ;MJs#ytV7s_+xmbGc494puo{qMay6jqT4kBBCKg#% z{3E0rn=-kuh2ii{bLf^RlU6z^*_BWcl_0Qjp~}vy7tVdgubsbeMalo^$B83806gsv$1sRbgj#ux$Q0{x%LE3?=eVhmexLwq8x-ay8{sOw11-RnK( z-Omn-ro0u0`o=I%oBel;s5EklgTqJ{(+4KE+8B%Uxflz&3A)JojD{veOnYVChqPQR}QkIc8!#Ag|q6n zQ~Es|rMzjk7Y@N7F7F!}+MstgT##0OK7LIG z_@EDX#R!iWrF^2?Ei5Iq0cLv+C;W4q@I^~APc@7T*^*~)<3xd_r$5*w= zjTkS*d}wlVH~zI`^ooIf(?V3qBM|s7EhDm#Wt7fZH_}*HX&V!`%_o}@cvta*hwb>} z_D?MCjQ5+r$IO{E^*8d;s|)gXeA7k5l;AK|`wqv5yA;({U%~o^LA!0M1?U);0Nu=^ zaap8}q%5LYB|z#2kJJN)Qf>-DVRUHP6Xhgy0BY<=bO5z}BC#VOEfAz?$ISj$CyV>F zn|&j`&H8@%k1XQguGPzSsc+}5-oYzv`Lo&jjI%)~VIT19cBae~ABg*oGnX%VI=*xD zqqDyCE{FA|iYslk5_-iII8aLY*4uLY46RiwsaI<+X<1?t6Q)=joe^j(y2hAj0Jhk3 z9`@1ufBg8V_?|^io;`b<_>%gvste9;+i@&+aNl0zsMZQB2DPh{TIEP;={4jbwG3p9 zg_D}4mf7IJa7-9T498`Y>*xZc)fVN{rMJ%sg6hh5zW-L-W>Me%z{2>!U8|Pwap$bw z&MP5L^AfAcoX?`#TrE*hWIuEH)6-4fy@J(4P8O)OGgdFq)>J1?$&(E7w6GCFj!P;w zhOBWrv3AWVZ%B&Mnh_R9?R?0)$>ZTY4k<$mmw}%wRfZ7{!7tj!;TMC!&zaOQ^&htI zFUd-bQ5gJ7{;)xR@`3-vFnKn7&DDJ;g^Fn-6c8E)h8jk4Zz8(u&iwsQm>4-*j0!u1 zA&pQLJsic};1Pvgm5ttCMFz$tN2nm*6Mm5@|K-S<&!#T8G41Dg^THxePLA86By1S+6}9UwX(DKN87mwG(eY{Azep0h8x zbD?-$UEuv>F#SLcE0EQf$5$s%0My0+PC3DtjqEA8*yyThd@j z!!KZwDwdOCd_^%QB~}z@BP`-%#K+2Ln@}*@Y>CJpBjH2!6hM?7?^__s?jH7s2*yfz zxq=Zu$5hjBS}WMnwGt)^&hp!SlCL0vl1LKKf-2AtOUH>-*)*%<=(!$UjBO*R6mi33 z*q<;R&?uZ#aCwO9q(Sjh)0+H{^NaX`vyy+j_eZ%yNq?=|;#q&-C7kR_%iFhSRSUZU zjh=jD|FsX#b~tvW-5w6qdd_1b60KUD4P@-C^{V5-{)6W|1AFxtsH#A^-K}^bBR4nd^JWz% zOgeYx{ezj~7R{Z6nZRyPmViQ{Y{M+LZHxKfXQ#GO61y0{j0_+>I3W{dsf-Xply2$% zmk)v|WJ#NAmk~@zIbfn;{YR1$pR#WN%!q(tgB=2a<3FmidC7Z9eEbPIcmnkNi%0xI zH`m3-XL)Ph$UA?6^ZD_ge?Gp|1U}lwA(WXIz1^0oF|^(`Yyb;G^^-a1*+kwLgQRC= zruUeKKP0^q-^BfTx*`!UTy#IBs;hJ zndq|O9)C_l0;?b z;KMLgks&&>db_a7_Wz=#C`x5r-V^s!rf5q_sqBMY-ifx>_n$uLb;IV5~%#i4; zIOPH&eoe*|Sy|W5V#(OKGvY*aS#<$yslnX=pH!%`g3<~*Mc9;*sBEUnjBPM0I#{?G zUMUNYHspg@0-))ibcmpe&2f~Zv7AV_yiK4h+De!x_zR=kR)v2mLC<-|@j1`Yy<9da 
zm$iWZQrDWE$Jm^}B`fphD216#99iy-`a323V4w7ex1AJ5AIQEj*qrBo>#9H!tqUVam>>xX^ zc!lUmGz&e=f!_}W&xkRwDUUY=LUfOP2;!aSajDw{D7D~_?B49UL>SOawg}6DAC@OJ z5vih+w&dZmbRbxS%Z>y!JF?b*f>&JMF_-xHYtN73Pv*Mq5do~>%FRmcExEZ{)X1O* z7d|Kq8a_P8d;a~xpt0Wl;%X=#8Mr)m#hZcNg(XnTo6&641DH(&<*k^|fN}v1hA!O$ z#sdqhH{APLm?Di(ASe;?g3I1qtTBKlQYjRg1`<}FaZvI~YAEKB%D-n39_5c~`PY84 z=d_@=oqEyIy%NXWx+~%SxAtj*Wj3&FsU! zG_>DdN_|6fV?P92gXZ;&QIR_8{>JK()%EVH*+EK>Uf>bolspzh0=-GQfI5mm{CSI; z!R;QlB7sja-Bdn2;p}hkxNET{Vz#|B@UZ=4>C*Q<`|{|#DfdrZwy+Xzh{g8b&U8WuufM`^@9WEC&HR9ke1DX-t-k ze6Jx(X0}J`!~EbRAZ;_r3^yx8gczZXRLl+SLgmVZPJQ*7eYQO?IpK=Z`#M?Y4!){Z zMj%Nvf8}VjlgJ!9ecIh()Y>=9zzO@dAAc(dThM2$6aB^!rDdv&{g_Zw=<{m@*Oj z#(Qq*KZfiX`00Y@va~~=SC6#wozF_!wh)IJ@36N}k|c)C)d@fx?h#FqKms2KXx;+T#=GiZa?h&sGD}wyEwW(7MnQI9L3FD~efO&`_Sk0! z671J#ZS5n0-|VBi*RFZfBxHF}?HdW>bM4L6*T6WL&#`ogF|wq|w}=CmDChIvD9-g_ zTposcKqoV$oJqYMF-92u9>ImqCD?}4jglNFpk+D;icXFXwd~n5oD>MpuRL8FYYgT;Kg8Bj;z6Di2CnqEZFAgmwWN4Z3@S)HVMK8yn}|{+Xsh=Lm;*{ z$)FaO?*S=d7H;!FPPeVYD=UYJhmP3o#rMaECt&LlH|&XS1%5bHtq#0Piz3#adEAsI zn%UWPYWi5Cni>x*Xg3B-=a?)^w>nhfR7_k`-rZ_Buy3NYpHA2h+8gaJgKGofvd>J| zQo?A8EZziQlxlR32v5w&cOKNN+lx3_m1-VA^v)2tbvcY{6L66Lc_M!~M`Zlf9@wJq z#@xOcp_|X^)x^I2klAK}`Pb8z_IfNq#61x_uTx+-aG4DmM)AA^^tFt4x^VqlsjmgS zcVCkfBMp`>B%(7EvcCHkTmaM;vc75&Nh3$t3*?O&fg?8#hK09KQUw zl=pcl{(Kq8!$-ZVyoL)p7{i>E!G?5O9qqvSdgqxww?x8Ps+pX+!%FCSo>K*n~ zq*9^?breous4jNzeyi;lNR7lPjM@~6Uy-v4nj5{0=W zICJqG&1x=@I8K-%s|LvX%t@aeht1E(W0~7Jm_vQA;z)6*Bn`suD|H9P8uF<3Zu=pn zItZf=teFOE&D`&^W_(F(4PZhxASaE{fI7(fPO13x26!ZW>?`0w-aa_KIG#)yx~!7_ zJ;v)rCfnE(MTjKx*D(ocvOsZ^Mocp@X^br7WbtMaR>r)U^HzoJi^NO8)r%(2ORG%( zZQmCnH8|n31^cn1Cr%r^vvSe4*Ty}#^Pqj?M9JzuX1=?VXdgQs%EbIAhB+@{$rz@` zlJRYApJJyL!Y7Ea>B;n(gwnja+Xp8WC!)Ra3Y6%{e-H4@v|0odtLTq_vL8-e!qcIK z&|N4Kez=eA?`wZhS@}Vd$oR0P0A9m?*w6;;_@6}`e+&=Vo{(*)c@N=^+DLjPch=wv zI(Z)Z_K*^5JYA*KMp9*)yVQIy2S!8!xmszr`E1>H(|gAp zepCFV66l@#m1tleJ8mZCF1ur8#6*nkh@BH?v)vMtAO{nGFKMxfxFJJ0eIQ8=`ed;> zvau2z42Ssj%6@nRkQ@gOli$v>8y)p|`xgS>(qTc1IXIKA1T9jG2P5#60&87t?b|Cp2bp3X<|IAo#Q1-?aXjAEh!MxBkN!#f 
zXUH0td65`hw*F3gjH7j#SKHmyWTfIn>q%N6aaD#fF_OT0K(43nK=p|`-vrq3VA+>S zvTqWK9kFT#t(pP%f%9t&xuTGVn&N5#kvM5v)TYHj%>iyY=D@7J#aVRk`($S^{ixjz z%A!5yq9^+z0Qu+_Ur2pX{QB;@q(5Q?&2X+{;$KO|l!)PpjQeKbGbz+2;U!QU2|pPn zT}{G%!VGZp@%F)S#c?zP`ZdPKVg9C!e|8VDS$b!cz0FJ*A|8^nAT|p8vPkQ^l<)9; zu)nn&b2!&n6v@bM0}RNyt8U!$u@Rp$%0Tc5B&A^Bwof{4pc;|A?Kw!`o${L14+nkZFMA?!@h$DOnxhX@e^x5bXc=bq}w` zy>zdiI3OlJ`raEI+I}wfKl}VUI|h!AKK|*BTZg|M%;?lvTq;`C7Xfir*=uj+eDh^o zL|mHpy|C=q&*$48M$#a6_Dy_1G(LaxYWp>44~WEZ24Ai?2(}HIxkh*U6X-!Oq3F&b z@Ifwi;~4NkiZL>R&4iign}2=bx5E5Gc5VV~x@sz> z^gWTj+kGqP{Pq$!ofeoqq>@Pa7P;V91>dUf3I`9CpWoVYk5;$VqtpwOV_ta7ELy`z9nD|1qH{i~_a^ z*p{W+GXHfppE_Rnd?G<*$;+3JPU~?yAurs$EYoqw&8~cEu{28-ErCg3cR5cv;tHbs zp*xweDrQ`o-1eT^c03nI*5Ml@>B3A7-_wPe0c42Lotc5hV)nc1o?krZ_TbEKANgIRoZkA% zbC=KTdqL;t7tVh+WG>#CBAWg);q~Z@xStlWqh z4utl5CuUd&-h5aY?9C@DBg>f~Pf9JmGRFVqy>i|J59H^28nNLN& z#XcNwFG<-m=joTuIrpZr8;9O8e9&;@b#qOPlOdM@kNkK~j$G1v=I*CZVAT&bxYa-qkCvN9P^=8dKqMS(8ENj})*eNF%c!%h zC)USD$Jg32@3BpB=InUnCRPuU+YMjXUcM`E&ug2Xc>$}XvPbfaNTiYH&MK~&R@|mV zb*K^h*h6Rw5<+K3{1^}^!oMioE%Lj?QxQW~Q6ww`FcEI3Fg>XzRP*ooh=yX-`m!cd zE(+C>gt%{k$tC3oe$+)DT)~kLWGOXwl^QTH!b^w-X6AqvG9?8{wd|_w%Su5`-9md* zK+LyC)@m*@Q@Um>UmI9eW_nUn%=Hroq)Z&%bJ3!-5@9X>>oTc^TvrWqls8#4;4#6v!5F-X#C;6iFZxh zcQ|`NL;Lu|yPz15Zy`*TW~xKmrvIcvo1#$**zYaW2cOl~)Je%=dEP`tiop_~2^vqC z)TC{@VWybm{&bVoU}OEuHf?!LNV~{wLJn#8ejp-hXw#;`%P>~RhbqvfZB+E2c~124 z7eu=kUn*Vv^6G&%Ts!cVYp%Y;}JhwD>zUan9X5W45*S>At`16EYhu(|*(FN6IRPNaU?|5Wklo{} zIKx_S#aTgW*z7xE#4`KgkeImAo_-`%oc-X24Y!*iXt6mBNecA`m7fL{4UC}@2iCO} z4$8Qq*sc}tmg0vKxljz{d-YtDBEc|MqrpQV%lFdVzmXiKCM8_H7gi|>5GDn66rIDx zZN(?{>N*$oo;rWUjEPT&mehLSqRX!A-K&$BCLW)@vC+d5Cp|X#wjq7Gce&zh(C>O+ zzA4ZczOQWDMZsH~6&i)RI%3Fh6)q;8E|nSXQ|d<9!2O8jM@hB^PweIng`}Lxyz_Rs z=2@xsiLA@Uj-R=F`kbdfd1rFL6{**Bz3|GOfyUF!kFI#^o^emDD=m2Mi=aKKb(5yo zURN~fa!|V?)_g6f9Wn|e#_T{)7^~e%%82D6gW^(E$;8E_=30C8Ix)cRWR5U*MlUs= zco1^42-H}P-I5Mn?=0hfIXYYSYIL@zNrP@4;+DD1^LHF+eyn*6eVBJ7H_vXKkAFmB zXwSTzKH>JVEDs81bMn+gYG*aaFC3>8jod$$(jD2}&pbME=)-&_bj=oS0JT!5LUVdhRH4WF87-a2)`Oy(ohM0;&q 
ze3WlOP9SM0#l@UdM=#IfMD?&Y=0(S!oK99|N-&HJo4mGep|$w+(%Z(*Tm8VpO9S@Z zg$t+OH?UPv=T4XR7TqqIJ$d4awNGAX56uu$ZY}HC`I^4hUUCu29fdI;efJUq)ORn< zz-i=lQ{aU-S^^2t>E&r)dS;p!M93 zzzH3t!?N6*D~bzc85PH0Ma7GU$38gpvhG7}>2cZ6>k67TYtyD#i?*S$&;PB*EdzV? zym{cI&Dx&VJhyE-%p7NaU@K0s1l%6XyVvLNQPNMIOOYc9R9TK66+U;UFRPH)(sNi5 z`Q$}CoYQ?n^apdq>BE7>(IsYvnPlT0TI6-(9#WJ*Bc&u2odsK>(@J;aNr_D^)P-Et z-Gz!#T9Aj$lZBAB6FVTe1fIly%$Qj@$eNNe3RWcg{>#;tm{PhWdyy<67}%xu-IuPGMHzjo!|CHG!4yH$SgNmmcwld$Zqj8nhYA5hr) z#<7?8zQ{D!UUT2Q{nBojFySWb%c1D{*$-4rf!*fhk@NYcXv*?gebAzhEN^Y=8zyC$ zL=rgLfp(`StVM|@9(5IZn3;n+hsnv+B)za8klu`M=SfF;JW34$5013|vFnS^c8mCt zP0NRmDUX&sKJ(yZ4IdvdW5LfT!ESIOnJyl-V?Rbei+&+aYQpJN^s(6&Ag zZeuHYNTjZ`qS2Mza;#`J&QyC`Uwm6jo-A2*gdyVNtV*TigV8S0G~o9* z=$wHY&uXiB7{*VvpiQtUAteZYitJqw(buAUrrF2s zvqkI+Ds>6fJzXNU*oo0f71_;(gsz3?!etRtM%ZvtWH_AfbIU3Z8L5iicrLUrk0YBp zxKR)q!VG;V(A-F-;m#I!t~xb0VDGj6C|gtY`isw5^B($j`4y+_Lr4r?{obx_yWRm! zRBmcRDb_aP#Dw<3 ze{nxjOapwj9RuZ(SZa)rCrXXviewqAO5=`%mnm45ot00)vLji?@XMR-8;MKiU>WF{ z;+_mdZJ!~gtuL8bDL`$yFuDb6*?G-oi-mvrWKh2$<38Mr^8V1>_kM5x{@ut)U;b## zTfO9}$vJfw+*Y<#m}&Myw_dk<-gVck4?+T_V`mm)Heql=PdOLfo7JsM_Y@~JGhUYS z##h^p{1jUuhwiC_ahwwf^oagG>P2y6o%rB|=(S`=h8GmoyHIcxo*qFz0V>~&8S-xe$%G*cKdS;Sto2f*2gi^sYp$eYKyW)@}QEeN>Q#k}ge^P=-JGmlk1 z;-Wk#fcCz@J=%|Hn$ax(+QB{YA86gI&Ad%JCIs*AedopT1-Y4hM)WDjwLhhG7-fBG z^$RV4#Z!y4bJhMgh}=sqCNQ9lvpNd(6caS@YSaKlEYu8T#08)#Q1vlk|!Bk+Z#}|pFdp2x*J<#;cz4L@3p#qt+f5PugA_Tfs=WBSAlk)L2DLW}YscTYE(x4=8dm`KgVG2J- zGLpz8qOn9zS`rh5(sL6I^w_v(&jyk5!b{?!PV<6OXY}qYtS@j_geFb1>gQZ?bH84l zE)I8g@b3|NANbcdW-|UBy1|{qgzJi{Pcac9otj9*46)pUXO;=Ky=$!^+%WWfoYN3;hb)wbaNAQD^>=|kt9R0d z3ak%bi4!swI90^lS4ky!7YBFSZMX>U{~zfj>G5aM34yv)ux6^ei&cvr+P)s?G_+Vqu=_wIG+<$Fc%i&umnee$aAB3M3f?A-&}ce$`b>LjafqfS>}d2!oL z;Vy_Z_ciC4%Yx4b!r~2Vu|+t)_+(<`jMTOiIHD+{t%JD;w^D9#Zl%<4Y^4w!-%1I% z{uu9RhRfmcQatB5;P_sI>jASVxC3J~^}ds}A{vW&Ceu4R;+<=GEaW`^H)kyN1F5l! 
zFxGOy-)zn>_2m25n?qwku;cWhGA0_am-(2vfT3G>->en-dISe{G9!qJe~EjfX>AstP4l;RZCcb68dq}D zZ2a?9U>n>8{3AOP~FrmY8tx zp|Q_Wer57Yw~WOM51jLnxN^srA1;_{AO1&=&GxTzJGK!$mSEA?HcMrDBa`}O=$Z@_ z4P@0(t&vyL_Ndn6=k$fdNforxplO>HGDWd6RN&Sug7B(1zrxyLzxwvKN4~>NOTT>+ zEZ=PJezVJa_A3=;FzwVF(MklGpB85UGvb;-;F?s>|FVe1^e;z-VYXufu=7b_rez_- zv^N5d>=l2uK2Y)!HVs`pOj0w*ze-$_);>6BmHpYaP4=hj%rzos(-zTWwRm~d8~0Cm zeXJO>cj7C@%vayF%PT8I?>7tI5k)Ul+S~U9SHYh)-(K2&g>64BI>m^xlVcX+Pd(Mq z<%K({(P`bx!C!AG;(p#Uq`L;<*hLD*rpLuVKAL8(>P>2&>2!HZ#T9cP?p;IMF;3m!eq(zp?MKa8*@1O)*1tv40fli^6Klm8 zz1NsN)d%-g@1ge^_?2{uKq`B`Ks9d*Hi#-p5<`u#K!HE|38`sl2ksz8<>O6G&lJ)7 zYX$DV{oVzY6Anz?`RN{ehB;xH*n9W2?XMmANJ;U_BcESXvE8OzfMk1h;1kTgF8TZu zWbqHXD}bZd1p)U=M92Ke3iKLH@UMF^86tJFWG^-@>_Vz8Y*X9|QOL?X@I|2ii|Dw( zSbJ*ZGkag`tpn}WR&4)U<*v>gQTH z@R_2>=yR_qx4hSB0Z(Q*tAW_lARb~nWXn^Ux$GuL_c&8G!H@Rrsfso-54{oXY}RVH zu+)A|^t#K+IUR4FZVy`BX0Nb<)?9dsK`ZB?9c6_2a-rOCj;HYs!a0=4Nh+7uf`(zF z133XSwoR)>EyPG>+>U;RlRJ!F@aE6<(VvfQ>-gZo#V@a&IlQE)7#FC!$sX6uH0@tL zJ!XG4cFt}4*UaD1k-EzDws!<5lPZLrN}4xqNIn*6Wj&E?_*R_dBI^+j@_$5ERGBwK z8wA!{%}zcM?229;rZLU>yLlk=o{@<7I_{2Fw~YTcIt+qXu>bh-Mc7EVo;W|FYerE9 zY$UKW&fqM*o4A2T{{-hZ_IzxRQl10O_gjbQHE5;gft536u3XsrvYx%?4ertLp4Ls< zTr>F6He^=?w+_=qBC($2Qv%;GX&;H0$ zKY#ZFT0f=emQN{g?k}<*P7DWz#dG5);)(}x!*y3{C8S3Sbelvy9dj9L60|wdpv3Ds z3}$d{3UY=5LHd{PFo1f|#CsS7q3R$$gv-DZTVzll$9TFPWcJD!XhusXpC0sHi%hSFL&-MLBl3<6&?#SVL*HXyQN> zW2~g$Zj6P{IWp*(c{p8%6d<&9z>aF_z+zf~MkLK}IV|1~+m1E64L6^JjHN~mpAD#i z0ym>z?0Fd5u?m>>aC*~xV<+t!#Z4F?mmxyNnm20!bLV>)m%ay^HTFZ`56&ub?pgMw z6RF4wW1fu&S2naAuh|-@mrVv4lFGJc*ULvbS$UkkuMcfuVX7lb$fsqtaF5A!sBi(e zxcO|?S#x>fwX6HxGIrL?Yk$0Q$U*CQv1jjy*KV_a+OXqq&)###vjz7Jx%Ik-MP3*C zmuSfR<-noqZnbwLub#1aAFLMBn1EG&7&|rr^zI}nuGiRSus}KUHb;P|;?Y1M5L1jM zMgSRaO^kG3C%!`KVf!1qsr{7@^~5LmuPU-^NyOZ-{x`=1tp(%@w$pAXhu zgn7>eueC!oBn7AU?U4*ww-5gYRcSjl&pPf)bkM3dec%8E>9L_i?xB{zzu0-=iY#dH zXY?Z=8G#GlIhQs~b}S6jm4+RxcBA)B{Y!E3ipt_Wdw;H6J@>i3*KMD@+Du!1dd`9e zLPqqpIrc$27T$hnquqMb2gHdtgAOL5 zf$N8$jaZXqf6VxB>zy;-eL3fay`Xb{&>71I`v$%aC%d$~zKrfU_t-IM6~a0rhUm=4 zCeztsr&x 
zq_;~%<@(v%uD)jTyr<3F<)`O8Xtq0POqe_O)S^(u2g}9Jix1HZ?CUK9bZ!92FeQ>L zEgalws_O69q*v&(Bvv2zpr@!d%|+c~gP0Yoeg42nq>g-whAJLQjvNBM^vQJUESyzH ztzalqu#n5%+PB9nO?i8%Gs)jy-#0pWnR(*q^>1EQ*mZf)g4v^AdG%!Fx;fjfxn|2F zYt7tWMcXH*o=!SuM+@7|Zq>GY{aFu)^G}4!|GrXuzwB?A^-Wl}?oM?1d?^=v>%_vF zgqt^HT=PnxLo)H>_F-wUz&; z-AfV1EZA0LQiGqI-P?B5n-A<6Q@K2O+*_~wRO|T-^VeT{&8E2<&D65fbMBA+7X0x3 z$Lw#v%PaqALG;v@`u>v&$)`Xw3>@r25=RZ zWYpqcK6Ma0-(*GWTV;Q7twCU*ps=Y zH9@rhHN5r66K3lCBbdVgNT7dW4jI_nw?*RQeXtNN%B#YdswGgmZKR$oe8vg*a=raH z0jp`cKFt<~j%TNHYJVOgB}D&B*{23Mv%<;gH^+Qwf1DsDeLVHA-$a|oy}EeCgbSwG zKi$13)ok)`hRFK-$|b+dw(aBNmc4n?l(B)gRxGSH?I~J|!S?mAjIP8=w7?EWJ^-yn zv;TUBXW_ihB54&2a3m2s+><08$&^pCd;vw;;Xa>-UL7`L93%OR4Lh@}HP;I{01eoR+hDJzdsus%U#M?%9JY z#7|4!BYo@!OnXM~ISA%VMDqg1FjO);2Cb$MWL#V0sYvUBc0_~Le1jH#`n*x3{t=6+ z73B~0G5Pv5`BsUP*&oS~zrw9@=u>(Q-%SBS#S=`8WHe@}UI#Hmz%(@YQ@sq@3Xvv_ zx%nYPeH-^kB?jPYD++5;yr{>L3%6H)zPcq&eP zwFO$*BwE~AgDOrvDRGGAKx%pd8;CelPz(V|XH=|&ebAb>(BA&F*%tGnh!JuZV3}p2+W@k6 z2t`;0s%sy!q~cGVxMFfC8seUjWce>l%IiUc!R%AH(@|~7;r(W)woK4!io`=*h%Qh#QuK6wMxCp&;$WAy779tm-M2DzJH zz2K*+TU4ePlU!MEk-DiA!Hd-6UvPqWg8`8o`NyU*xfH=LpJE{Dq-Ijl3AD>XG+Lv= z6Q~IyBTJCdgZpyx1ltjTL(@?e{?xcW3#QCRPv@2QUAo}(o(sj^qOl9NnRDCOoq7*n zx*>3Su^sC|Whgz1UwTT!ccm83R}PDgT7IXwvD2YFfzn%}H@8EhXRHXSQLJaMB6OGi zJkOm#Du+cIc_cAdHC4;w^L%I;q{~G5bgWf6+#FtaL8kmurmtfLdGHV_MqN5=C;UWU z3{UF7O61%qiV#xTJm1`f8d~_XtY`XDx89p>qJF*oPd{qj99&0wzP=LiwMh^}l4{Y34Lr5mfOHGg zF`$EfAggn`#Ae-QaaZ91&u_H*z=`nMRe=$z<@Krh8=iq2Z-GSHM01CU>>~&x6OmNI z`U3V+G9nkL-nHLa zXPQr;?lynANrw9j#%WpF++mQbjVADJmq z)aCn@L%bI;Q&>zxE_*~SiNv~c3*eUabnq1?X;Sw{xfu$~fv4`I0~#%<|_E~2z4Flr4MQ!QoO9%fe+Z&BtQ?)X*^*!cC!hYs22Shw4)zYY|h-Ww<^72V6qY}CEQPQ-pN zi-X<3+0LDzZ2l|tk|iqi4>~>#@GZB?^VB2Lb9VV7+aGepiwOj+mv*ec(qV~?%3yHZ zhK;Xs78`B=#Eo(Y5m3@EUv8b-o(W1*5HsI;>)UTTT-40j_~1sjDsph_DtWf3;Ii#DPI^k|nc;Nz&M9e08~an+$q za1D`$R{Ogm@lT-3jJ+;2GaZec9P*CcRAaK%*dsbBO!U}EmIpd?N<$Af{=nE-)$mTW zPH+svo#|ux8>>ZManTvyND7@`hIPO3r2K{vHuJ+*PIpW*dwP6X&#L+}DEcnsfAQ z6CKVOkFKrp;qw7qAZIGB7U!!~QbF#D9A~E0p-a$K`b_Aie*#@*d|wl^vr2O8<{Fc= 
z#@C|r8JINl;vI@==;5LP#&*_J&?TbE zh2}=7w5FMV0$m0U(~>}so#o7w$L8HCF&S>0aCfVQt_R$kIk+cdFYRR9(b)`gsYJ|j z$7gv<*d#dE+y(ppyO06eTqE8F+6rXpq&-c&$*#H3$Br<^T+Mi|y+w13#wF9C3VE;*DZm`B;nBMrz)spU&Md1#mk1F24Cux+LSuFAr~%fC2jfh5fXjz!oHZ8NK3G~wiSuwoGc}iM=w_fu zYlmlNVJ{!%S)wH@X(YxQF5c`urL|0R@U9MBBCdC#l^eDqy+&xva6v&X^Bi%u7TA7Z;@?YRnm$$0z8JG5k6`I$Lry%PfrE z#2KA+uh8e9q}0@kjWg5*L;g!7YF$i5N19N5&~wC0oi{Ij~ZzgZIN3 zc_pNCp@P;UC;CSIz!^DgtVCvlP|$kpvk4luQ$xFmYmw@};&ALhF0q63W))L4y5$c# zj7Hi#+qSS^E5e?XcR`*bBx^*nGB^VXg;t(~gGjwP;d$CA`>c-ki`ZQ zQk&q3^+YryX$fp%wS;HIGj$kN=eiC^O3Wy5KO$U~%OR%r?V1-9I7tXj?p0Q525QUv z^vI)Bg=S62LD@Vx{J=9S-Vx&4?c>G_nNfB3p7A^1w{mk_BF(yCX|u=d&Evwkm(F;0 z&W4@W-E#NPo>!Ij+quS0TF@`WrP=YDF8kz-8*Zh&a1UhF2jLsQCD4)i{C-{BprjA` z!|43ZcFu2#ApXER2j>w(LpM{ehpYR_l@mRWV!LB=%+Bz6v_D$te0O+w~{b-X{?(% zBZEP$*I64!zX(uoxltxAQP5gfYT%4K2>S2YA=Ah;E+c4B6?i|Gj(}>MC z&=34;%=Z1+N3zix@(BHD5dA2^)5w1OeS1;%plnnG4kp;6^b|i(41u*Le0qeAfi5i> zpJL;2(OXFfrH_{@c_Pnxl2p9_4M}ygmQ?DbFVA*l504zKuSZ^Kyo{MTS>Jl(WCvP~ zOp=O5CQ0?ME2;F@Ili$iJ@zUw#?uZ%WUN7Am(P@!7;ZM zkcr5d6>%<_kydGZ6?7{^;acr)g(#dSFjJ1(7n-_K zF4986)<=5}_bFswC|ux|Z-u!kYak6xw`5ON8J)|`4Sd=2ZsE@K)(`Dp?)&(GgWpA) zSYVo2pvvJ(47e{r_6CZh#C324Ctc=b_qDb_L6?^(#Y_44WMAEz^B<&<$ zx|aWPV}rW`HSeT4JCJ#Yr5*83$XI^2-rJgU)ZV@qa`Z6E(X*L{%ZxW`kVs9PkvV?I zBlqEv%b-&@QbTK#d5+YOBjkZH{Bck)jG^NHpWJG;wtf>U5Yi3 zbALjOVzv`!8+tw(L#6Xz#o#lb6$3M4K4r$w?rvx8j?O3LPhl-Rd=a28cc2@=cNA}M z+Nhj!>^W?Td&B1fM*1z*#yX20KI+qmVL9j6GC&Uweg|l2DXfh(#h$9ZR$;kTv3-CZ z$r4qLT^#G;N>t?&DGxfBL;!ttaFZUpIMy8TRcB?D6{4Yk0Q4m+Q{~tzVcpH zV;=x?%i!Pi*ehdKVHK*6?LZrK?S5iHupH1B8#t`_DjxZ5r|cqe2e052^66e@4~gkv zlD`sSa0m$p2H}o|k*=WHUSx$Xev*Ys7fHkr$4w^%LT-P{rY~)Cs z7`tOo+~~L|ae=Yh_s1O}fFb}y_R}|t;s&8}Yh*ADaU7spfAGw~SS4PF?g6fZ!3xAK zm=ajqRpR{U4mk(xW%6M|ZV)}iFRrhMB`U40@?rB{W=dS`K=%hls_(oE$6^FMgKBD6?(g_RI==vm3K5suRI0^36CxMb!NN!{52hdc6+>SF5?VCqQEYrGx~V;?JcZ>-?_oayTZ&|% zL;t#6>tD(~FK^_?b?LE8(Ic-Eb;Jc{jI7QKIWnCYG%}qTPq=4>9$V>la_mAq_Bz}k zcBX$bH|~Vayp`jV&qt2G&iDpv;door_!Uj#PjK-XNr&~|_Pxw0=*h*LGS%f2{swVM 
zedXf>za3ysflq;;jU6th@Civ)Ns7ZM>#_6pGN)i{=7=H85!cx9W+`(DM&X_J8$Yj( z%W`A6%PAU{M$XAT1h~ATaRH|s(ww3(Npnv2zL0fe*~4Av%yORVh1NXS(wQaffxu24 z8M!!26O!o8e&D=Enq^C8met2DGfHb3+ripDHf%3!wKGOmSld@vWBF77*6fp9&XRkO zeQ97P(+~LZ^8vBnv6g+KF)uviQHi|*&j&?+^}^$M0qY0VF<)+M2Jbr9XsVq3CzvlU z*7VjiQC{EkfC-|K;KTFf3X9trllW-Nygt|s&-=;e#vXot9%kx+hxheSo@u;&g}K;; zHzpd#_&FRYRw;8W#>WaRH&(iwqA^c&Fpo!c@`unwNJ+ev>^rd_I2+kFk>vU$B`)Gkt350C`T?HsAZ7yw3Vkhuo7qTR>_f98pl8lM*~qOR(T!Y$RvYkWRgL1 zTp6Ut&hU**F+w@^YSG7y5vs8(>L5m_ymHe2NIrQv{%YfE*C(&WMiO6@X z6W!fdh@uR|SrG7*zC66cs?9!V`HpqQQSg%^=QQQv<3z0zdEEr^Q}44jLOW_=TqjR* z&iRFGJJT}bj`3ufA?35ldl4(0nj%)S(U8tk?fz9Xh2m_2QY z^0F1xIc9%hzO;4a>Mii7jo9&isgfKq~96g;|wry)!;3MEZ3!b*GQ2mgN zj%AFl&(FraWtEAk>KYhKo0w#Y@>-GeFz8yEj2_9rw3_;1zs0&vziXEaPi<6x_mJGSzjgghX&!S^++V7L# z$OXT58nPH}WJmgGf3w=XJTk}YfIsjUj@_%H>sq5q0aq&?#o1=v%k%8Q-zY)fT#bvi zr<4u`$rN~o=ZTmE#xD^$?mYFbsqLIOJa*`J>=-%rDlw{tu@UQMnZ{$YS5A(-O2pML zcB(_e>Ub|V*157(>oI+ZeR=Z0Xau=uWQd}sjcFvSH^w_ zO$rIjafSm;ol%|jQA}Ua&7sKn*lJ{LOa(Nmd6|CSim3{k{e#3+t(_CTf9zf-ZuGDN zO((1~w$_uJ6F&A5Rua9S%v=)|1oiKa<;Nz#N{(dLg<5~4?yJs4D1o-BVdN(W>6v8)*S9g)U5CX$_nuOFcwqwzzwLgJ@mB7>mJIxUiTRnG#) zLs~LJjb!Yy4AQ?6wfKUbt}^m zbO(QRhvuwj%e?EQOm*lI>>v%z`+}f>53i&j8g#r!!|IVCSzK7A4dHB#0ylPN zs7H>ISjV0LbZ5evv7jjMmF|ja=mg-cqQIaDdMANj#3xMewZ;N;VesmvOmBx{P~&Ue z#Mpey(A|xnTx|H{BWPtgREI7R_tya3*r7^w=rZ*B(R9&cH*wZSLKEH+leLgX8gC7q z?ZunvBB6&H&$)Eb(9J*>6$>7RnbzW&{>_=`eis|o!)d1J4(`>VDdMehSBcKb$7A@R z!H0KfSUs}Rd>NK8mv~SVTXUj@t_RFjRcInsv>{`DkTEY21uiapHWTLRZ07wLeB0U4 z6s^rw8m*E5IDRiG{F*gkI2kt!6cqNt!jek8jAPU9o^;mww(T6=F*37~_N ziTl6Ds<^@Lg|05XB>I-8*{^1y*>}pmzh|6hScmJ+q?d}$3<0OL=bif{uUQ$+97hss z8KvYO!_qnkf+g#6sml#6w6hWt8Y|&IuM%`FR3E#A!)ZgJr>*vMHWM`alZxzRaxOS4 z=#2Y<-&uyd6g`SQZJiS|wg6|5JXz>O3>#}>^<4?A&$+OCXKW*&dlL^jCs%Q-A^Zql z?>=#YiZi?g=p3HC9D8M~mpglX&V`-Xhv4d-I4UWYe62;F85k8%{Fgp87~ z9z*M9PA)2TLzwESfE{3Y4N~m&4nNJY9+A)4$1)jrzk0pqpp({MJR`toeS5Ufw~F$_ zFA?+Ixv+&r@=R%}v+V%Yf8c$9W;{mPYibx}xv|w<0ll}C^}?q$udY0Qi;aie)m7Z8 zeZ3Nzcw0iRMDHrSYZSE3;9%%bBMrTd*T(TtXlQ5|`hhJ6k@+ 
z^1gZuy~4^yISbNTTS<&Z7j(jmIl2z%X{5hU#-7&Fv|g0y>@unAkPMKp#kXq-pm5fH zI|)lXBK0Ej@N4W8w9Xlp8W-#q2YNO9o7!%1p}`}n;~jk?gSkqMPab})cWKNKwUasf zIKRf(G{HaC)NB!sDJl(zG_Vv-FBq% zeBJcUAJ%j8ADT8FHx>W$-l{fd^}kv=zV5jG-RA>Yr%z0LV9I@g+9U~Kfdzl&bw=(o zOSeugcWOzU<#3!|zXW$)sJT;150&xGu&lk3E($B^E=@xXn*!Zc`EroW6gbM59B}rD zM|aiVP_l`kS%OPw%7W6k=<_Jm;V%E!*b!H2e$lW>UNrjYq3I#=!Ymq-`aUTm3TjPA?9#Ts2iT2<2;{%8lG6}D@2enIwW90 z29FcV=W*~Mn9a{GL|QnFKY*Wqi}xPk=T#1TTYkPIb^>wHaTxzKrq;GttJn{y4Pu2! z`|K!B6wXIDSyi?u&kj+85X}zjzX9XV;?KGIGjfLTcgz!iXHOM>?xOzAk;xFv7k___ z=E;A@Jmq*Cl@8$s2^pmR~+ab?%2E^30h?fzXmSo z6Kdgb0eH35S88O<1IlkZA9(!A9P>6t4jJ2WG{3MGL*q6Azo2Pzqm=SICklWD0qAJe z5$glb1?bJuPsGJS6#c~Kr_jgYC;t0*{+xqdT*aSn0^bYcCi`-0oYf1O{V2(sd_Lk` z%&6+c{DCQ_#H5m$k$^=P$41i9xzkK2Ko@sNQak)TI5GiX?=aef;TtK4}hc%69KG5SUoAvTSl@8D;X+uq?#*G2|4Z&L-BW#n}`?5tR&?55-h59x6m&Xw=Z%a3AZm@|J$b`{qVx?A9!W` z;zyR3iV^P&v48k_p?!MZE3d6yv}&RGaL@gVHh(&9-^A&q_m95uzT59xGrHohcfUDh z`r=84O0X}g$;JOcE+Ka)u#Y7ha)&Ttgw7@UY)Z+Cl?evJZLp4zv{;!Eai3_T){!+? zqO+>xy8<%nZO*rnn*U1ULpR@wbs8derW29;cc%na)@sAJ;a|Xwxr9H1OYoWg9^Q{(>rB>Glui<^Jvyd_VeZ`FYHLq#4g zZZG?F7B#MVdsnlaZ_yfZptm6M+%v$rU_5NRBn$fzDDr zA{6IDn>dSu>2?FZZyvrw`FkPCv4af8+56vBJ;h!|kGvasge&#Bb`5@y9rUxr2R(hi zKLej+2On8ELot2O12TVQUtNEqBAMDkfo|f9zjilUyRkz)#c6M<8p!JL*Y08tIK`oh z&a+8!@Tfj^iMi7qTgz7!;j2D&iCNzrTc1^GjyyI;U}fBRIOnuE_O!;kKBRp!=VasY znG0m>csN(%Ip5H@d(T4Fw>V~tw?S@o=Nf8fwn{cs)N=ec{`fIL&vo%vbgxKtXfw>p zG0A6WdsIL{A!3JjA22{Nfow(vT!J7rQIM{v=xMRA)w5+t37#LSN-TZ*ga&H9S>y z($F5kQeRplVdH5rh2MFXJ%TEOz>Sbun90*rucjn1pT5|yOx=G~4PL==n(~pQobE2} za?fhL7OLLWpFvJ*vB$++$w6JGDWxXm?MSKF-T1`a?HW44SvhEn!5o&+l{}9@_F2J8 zSlYb9pX&hAk3-+Uim%7>+H755oK!j&^MONS=&v#=cM9J~2)=@Efa5`l&4Kr02h}(D z#3PMY=dXZv{p~(+-5{Acvkx>KU_Hw39&h)FZ^JqYY!rGPoXXGp+E-b_`CRyjo|{*R zrLpHRPCxtVz-H?T%>0=1{E}EDp5Fs_PX#~VzhwBE?W??Xv3PzpKj)poG{H{6_s@l{ zeS~T9B7d%?V?f6?!tMFg{g7w8QvB|ox;g{1o^!@$f)6dm{wD`|kb9~ro-bF>)!)^S zw2?67L$#3KuCaPA$$6Ko3nbYIZpB*ET)o%Yt9|_2E(@MM0`dfBDI~}Lw^hwDPl9Rt z0Mn8)A^5aGCIo1WJq^A0|4hvf_E9h`^mlM3GCM$x;l@Fcp*WK@aN=ceP5rIV1;4e5 
zAlH^LcKkW`I_QbdJi<)q(vH;CIy@}U@^c(HEqR0b8%W+5E~41!&NmcqR5XHQ&vm{b z`Gaz_C4USTL$QCo-%u8#`UdIp!@~{n4eSTT>N(V3c+UcwR~x)T)t2|e&Fhg+)`zz~PX(TP4%z702?cIl(#8!~?MH&~DTuX-uST;n}> zhqMF#eI4^KtiegraYPn#iq=L&UH;5}51ZgU{GHYay5EIrjWi#4c~9c-2EKt2cvWX1 z!t1Q6wne?X_5^S^5r$VDS&8x;iQshS8*JrQ=WlRU;3)G!j8|&-B+Un!lBgikJto+i zVLp)7OtG=W{_iTPysPpHb8|scQ^oGB1CMLZhY^*cZ;#_g)sdb<_ z$iz%@cyl47*PCp$f9kA2y!#H;%4h|=dfYJR<}b2`;t#n3L-k%~A35zrZ`C<$uuz+D z7-!{IIHvN9b1aj}i#96eQKhgP(7bi#%__G@Zvv(R`=oU3NzTI01{zaz%Cc7+hlleO|@ z;T?Y{cs)9D>B`$Wbdzs*S-;_YRO0KMtFceQ8`9-BVsC+7--RC5Z#aKsxW+e}=X=9< z`VAML>aT(NhMq%hyrGHn4VuG^p=b3QF1YI}1+VAO3Ug4sVdNY>)NkmBIfT?Vd~Z;l z)8#6x)oq&RoJ~XJdZscl8ZDK6O%2=cA;T`;PX1 z`VJ=-O=?0R_Z{v3^c@`)#fWp`C{n?@!o1Mjf{D-Csny_qVjfgP+Y%zcZsi!&PsU^H|D#`9jzb6BfdnJ2XLuGEZGL8kqaWtx1$`8R9M^Swdoq%?;qVZ@4^ zH(X#mA@XW`gCjMmtFzUMWtx1$1p~B9({DfriSiZ>Z_^ukgul{n=!iL_)%XVL%1PWH z*Oz3Pd_%{ZwM^4*KqoML?lHG1n;tyMUD%jM!F6YFk*XeMUSyeuw~$O5C}o<*f1=-} zIv|mi_j{Q5^c=}~fuSy+YEOpccrum%<`b4R^Bm~jxRJ;?AsL#q2Fu~5)%GSanfDCF zracoFVW2M8c*x%DsmrD3SMXjMkNq6a_NlbGw*;jI+uvSgwc|YootXC?GzyF(EBRgk z?trIV6HF18DQvW6d#-~!Yqfm3OU-%{qWAiAXDLCrw{mc&u`djhj>ltTCwwW|s5RDO zHv^xxbk>;0ui~9Ru)gOc+!hiiBz$TwUtOz#BW8Mkevz6s(O}GG|X}yxaN4rjthEy~%pAhF#Fs*##1Bvg_p7 zYmLv{YIZ$#OD9)^cL1L>G&W9}5&ERj)K*;~Zo^(*7NRO#j7)nv7|(mUzrEP1vgn+} z7`)e)aL2g0kO8PP!}ANA=kQTL2F$|wdl()vu8U)M_*n5=R=<(v4|tLd*0(~Rk=ja0 zcc>z(>xe2hI?xZfR8gLT3RTO4u|thmIzQ#C{rj+h9Ex+~l1@qNM9$2^*ui9`3Q13j z+95wcLF?RshR$kMAG^D_)E!$_?KS+9vAY}ZyJPeEkW6df8=GXk#CsDilh-!x5m4%!zq#`PT9$va-qX1JNbDX8#Q{&DFd8w8peiz zQ`qMkCQbo7bINlao|dKIL$Vw)|^5 zRFQrKlGijgsFI`i55v-0raL1Kvfr^bBgDbom5e<2YF3&(fzKtz^4L(^YaYpO?9|?# zhaKgq0yeKQyD~k_vKKq__?74ZAN1?|d?Dc9WctCAK+jEB?VuBGhYP%Dbpm#OIPkyl z^9sOIWH^^%zy$A96F&v`o=k7V{`fia%`i?^`?}zZLDqt6 zPfF%K%}Mzc32qhvU2K=cZQjrg-_zWU+BSRA3Xnn244r)}Bn*VnTb(Y z@mG~})$vbp@J}`QU$}HYc<@JF1g;H4`}hBw_*2h+&IB+L(UXG_R(pg$uo6hrPmE)l zC+0`)O{|uv&%K$-(I6X_oz!wC1WG&2>t(MfbBg`XSGH~qOo?7B63xxgYt7B2=3Qm> zdDt9~LO)`@do@j!v!m`s=EXKAJ2#ePquN 
zTR#%N*a_AvrR&$1M&A*e?eyp~z+UAB!Unk^QY)EWS;Jx|@277Pk&vWsDBpWi0UD`C zYB_(Pt>qGzhziOE)_uC`qbKdF4vTxmqz~<@o-*U3znSr+fs|+q^P|(pfs+%n15QKF z#7Qxzij%uIsWdo_-#d{JbJx-Z(R z-%PMahy|sAt*1W>5y!^;lKgpSEEf2kKR06d znfN>1gdQ?7`0uix+6_3%k>vqxqC~uec%GxLC_brBUC8}i;0U@8X6QicyoNkS``iGD z>Fu7>UEJ>GOlZ%K^2MuD-|0ex%!7S3jIH|ZO0Q`X;LRRQW3zP^db>G(E4DpIheKC@ z@)XAoy?{LBoCyRkAZEJRN{dC+HsBdWe(RCl4 zW2M)pC3&v+9le}ynC?5YCk9=vRc=kJ7T~_)cSvu{X7h9R9n9M_4|K=J{XhH;n5+XXXgA3(i+i`T7ARunvhOf;9xahv{T>EHXV7Rl)tM0T+)u`K{h+l zsjXe!-a20#7=0X>4!d`(M&ht(3#sryA6Ww?2&T^PE!fm{ptm6BaQAOVCq>`x6H9%T+hj4fZK-mpaVp8{#>FhN{nw33R2+8^c34o9XmLu-j+^rMJ=EX;OwuWnj+&SK*J_(r0XuUOdi5r44QQl=3nR)yRra%r>nkp1{41O;C%@=}W zeq{Jg4#ym0__MKx4l;ay=lM1K{CbRkFT;0n#+SRkAI5)%;Ria;ZzXuUEj&L(!9$P{ z1rP^eF(!)I0Z;0H3X~!}iCpfQMkN-(kO^g1R$5w>R5I@q8-apA4cx zR(#<+-^=jRL6LdE#(4g@^ZXtA2;l#UYPO*a|1k~}8ox2aH^4ak`1wHxekebOCk@gJ zx4nvgI`D<|VTC7ZAwz^GKFv%(b7Wehg8isD!r~F#KWu%oU9M`2+r( zh

S&jh1n&JeeMOkU4{=4@cYr6k z(ax=L|s)4kSST31>;ptckO_^V^T9xBEDa;HjTYh`K}oyXu_h<1|<>J?*}M z_pN2n9G6MC$WAI?$U?FnI``L8W)^o)BV!P5DDM^Dp<4A|-bV83(N%hr%JcH;Mvmvf zabV=(>{)?-Z6BOhoQIA_ZSX+;dyKbxbtFaJ@`~Go>%13f#KV-=OQ~q$bHK<788q}q z!3^#&4)djubc-6QXmCR$?(J^ht_2LE4uQDY_m6s@>c_n*5h&F(Lr&e$^R`oV+yy%Jm$1XX53TRMp- zTrza)av|@CQ{}l#7(OW#Vfbhu`!gXB&m>3y305S)ZQ{N3ES_8=bjPfxUdXZH$+=XJ znuGretdmtM`KG~}D_7Zz#W1=F5j{YnX5e&fuoLxoru79!-6YqGjZC#(KD2lj(0T>< z7E^c>)5x#ry~UT%Y3P!fAgSc!zFxHNxAe-i zd6O4EEtZ$goR=Xcu74xL9xPTh7L7l@Wz8S64(@z#+}ke>e{uilY2e1{a?iA{3XY(v zaT8V~dvssmY-72Hr~+H=p<-k>>KbgdhjhNA+}(gSc0AY~E6v>(a1wqdzTxNbfcu$rJo9co2}{f}e$KawlH4&%uLYUbl(0V=f z@6dW%%1)O&4%M#1$BYxNyI95W*qv^zGoF`VztTAToaMNLA1+4lbEX{2JDeCajdfwl`a34YDYx57q$G&3m&&*5FJa`QdcW z!F&sx@f-8=+4c=A_4xeY@xM0W08bJ0!K`u2V|b3BgKv#sk})5;*R`m2z$g4K31=L7 zO2NXX&$sCRKWS8oj$*6X&3xbdK9Cvc8W>}?c*23e^;wbt+BN>B-BZ`C1Fy+w%S(hvucm84bN}l zwTV?pSxM89wk91ZMWmB}6r~Cxy(ghd5s}`J zB1pHOVn750q$*87K=dsj5}JUtP(lfWl0blvKmy6V7jlz((@-hd?{oGh5ES3<_viii z{qf7@wX-`rJ3Djc%$YN1_UzSO9RK36Qa+^`mzq=ReCZOUyOds7`iIiVWonffQs!{k zSId4=_V;on%N;7`EdO-{y@Fqb+7%oX)ru`EeqQmie>eXL{=fO#0;~aT0;U8U2{;>& z9AH!`UFqXW36(2UUR~Ms(%>roRTfvdQ}wN?d#cr}How}B)$UhsUH!A_7hkUV@{U(J zzw+rTNi{mx*juwy&9OC4*37Qex7OTRKiB%LmaVq6cFWpRYyVa|wN8yX?dy!F^In|| zb)xE|)h$r>jk?n_ll){nqvOzE=CSA+KF-(5At64eq`E z?(3^w&uloX;hsi*jm9=Q(b&6j$Hu#x6m9Zh6L-_VrU#n6*lcO@0?ns4k8a`DVswjR zEnjW1SoVK3^`Ubum7#Gwt=;NTM;Nak+!4KN? 
zXt%oE?e;C(zt?_shpHX=cUaruMo0gSA9lRascNTHo&7t1(mA`!2VK(M81P1TSEcLl zuHoHkciY+hjqa{0_*Mwg8-mdfZ)VFW@&ashY zM}~}CFmmOnVx#7bT0Uyys9#3?Hu|N}UyM!|?HZ$v@f}lk%;+(Z@4of!sj;t*T|M^E zxOd0Jyf^s0JL6l8UpC%0q0xlx6Anx`HR1lh*>WaYC;Ck+GqK*pUK6KHTsv{!#1j)Q zPV%1AZqnvSk&~`Zx;weh58-`pf$*-|zH6jSm)mSop)AKl1x% z{zu8vx__*F%-hPJ4E*HSCvl(L|J3i(S3X_y>G4k=O)oXQ%k+fLDt)&7Gj~XlkeVSQ zLMDZ*2{{sScE)Qn=FBWH^YfYaKR^EY@h{&0;`Z!%vj@)pX^wTyr*nRp`@-Ch=7xRQ z;>)F9o|so`UZ;5<&pR={-2Be-=gr@HF&%*nQ zdM#SGSY14F@yW&RB~_O!ToSgl$kJ9zLzdc?wOqF0Yw`7@uN~hE{AR~D>B~DV-?*aW ziX|%|SC(J-&dQ%x+E=~2YV@jIs|&9#y?W&8tKYu$?Y3{-Yu;aTZEfSVE7sLq*L>Z| zb;;k=|8DAcw)Gv?hi(YiuxDe_jZxnReZS}XKQ_I!Y5S(QANu^T?}rONHu!PczpDJ} z%YUW*)cmK$<*y7`UG-)||frQ??2TXt=&zIDRZeLt7_dG^n{wyE2OZhL>*ylrc? zg>E~x?fSOVUpoA9V7vGBQQI$Ve-zp+bYrM(NAn%ScU;+#vUB{-sGT{xe0TZps=q66 z*IT46}wODsj}z8J=cHj`Rls9`rdc;M(k_5 zuhTx;{`&jB+wb~q)Ne5dsydYke0t>TBU_Gy9l3ra z<48_efv|wEMqzJ+jSTxVY+2Zru;XFZ!`w%`j+Q@K|7hol_b0KH&J2f~o9m!JIU(hfZBSm2&F$)7t4$r@Nf)b9%(-Pfjm8{oU!Ur;nY!b~^Qpex}TsSI@LRGvv&) zGfU3=bY}ONurpWBq(9cjt z2A}PJcJkSIXTLvt@a&bdY3Dkh+jTDDT;jPuB8x;;i)<13R^-^o&m&hyhDM%>ycwB& zzR>wP=ifd*=KPHFE6@LO{>1tC^Y<^P7m8e{a-rFU9v8-32)VHQ!nO-1FT`KSx>)dH zrHhR(_P99uV#vki7q?$Lc`@!{=B2kUy?1HWrEf3oymb1~%}ZHP1*6`NS`f7{YJb$l zsJN)i=mOCJ(T$?pM)!%H5dBH?(&%;3yP}Uo$3)+b{`0cmkE`OU_f@~ErLOv4t$wxc)ecvOUj69m;;a9I3=!BT%EXvaV_II#SM)c6E`DnW!$E?(701^H{-J73&od+FBjh+{*Cy) z@$bY>ik}`oFMfIay7(XCe~Ax|kBd)=e|WRl%^EiYZ+5=f^X9uZXWsnw=Jz)b-n?=% z?UvWAinm_B)%n(lThni?xD|S9->tA)*KcJc6ild`&?KQ-!svt<39A!!C7er0N>man zBvwtVm)Iilt;Dg3UnH(g{5A1>qCL^LUFvq-+nsKYxIO*$irb;L&)mL!`{A7ucWT{f ze`na8@pnGHv-Hm9J7ITX?zob?lKhhzC3R04oisCPb<&=s3rV)5$H_j)FC+&f*Gdjd z?w33zc|r0I$%m3-lGAKnwu-hUww|_kZ8L4FY@xPOwwtzWdqMk)_R97;_NMk8_R;nb z`wIIn_HcWgJ=0OZ;qR#FXygcVbaRY!Om}?a*y;##oO4`rBstuUM|ZvNmbmMGxBA_= zcOCa?+nt0C~aWcn6&rPW~MDnTb;HwZExD~vtG8>KYl-V8*KSvs>zwPFE6L?{d%G*T8@OA$ySV$fN4O`tKXrfU{>Ht* zz0JMf9qzv5j&~=!Q{CC_$LW^z!s#!hmrt*fUMsypdh_(4^e*Y|q)$&@p1v)8Px_(s znDm?J*%<{hif5F~sGLzFqfJJ~j2;<%Glpi2$(WQeEn{Y8;mp37`!kPbMrI~uKFCtD 
zie#0^s+v_Vt3_7FtX^3|v);>^mNh$TY1X=|Em^;!9ZC&Rtz2yL&7ax~@@;y53S-_d&5V|Fi{SxaWv09(sV6?Rs^Qn5(CX5A^BcJxiPz zt^X-TC?mv3^`Q7dJuRwglSDcFOMW?JsVJ{D5LMK&VzJg)6eIm)umubN13?3@8q5M+ zzyvT{af{i?4)L;HRJ7NJiWT%H7ezU*(xN%{ zG}g}xUp+$fQY(t3T6mNlw>N8reFMO30u?!o0U&=Sa+ClWwmWgHB zQSpxcp7_k#k@ziQq54?N)Puzk)g{`17V1p#u|8L<*6xb7YDwOpeki68KTZ1{go$@( z&$p}%L^o?=F`lvndQCGj^gz)^(hqy_kNmdFeCtv1h8`-W>q|sC^6ICr5#Q*i#30LR zQC@irP^<`1XGGA;Y%@VH-Vw6{)7@|B9>n(Gj*?uF*GF^<&mr|d_&?8gK zRW)O`-cStHPKeFgSK=#et5~Rw5i=xB)otP<^%YT1J1IU^ABll#Uw$)8Jo=XqkH-_T z%Jg_3VKu@=YGHVwpqR+4co}8cVc8@qLc@`k7NQ?91b91vYBUy8n#9%7>Y zg&58n&3auGwJGmly_*=OSD~#byS`9NQ&))N(6x%@q|7JaO&Q;XW0|(5=&lbCi!9%Z zx1qrx>k2VV-ylBH+ZeId_M)T35U*HM& z{~^$Et=12^Ulz5k{^AAQPxRK;i8Xo;uwK-(xWroQ9&2Qrc1={bM4I(oqxTUbtsjat z)^*h9cTvImI_Y|YE@HTK4&iW7M;~Q4I3H;}BdS@G$m2`nvCPNIoBL{r8G0`x&Z`Dx z`9rMIqeON1&DX1-IAbj+4q7jXWtK#-+(#F`dR>AyDvNH~4bfS)hd!USFCa?lny77_ z!%JQYdA=m3=jCmwC#LG{DC=V}h4vi@Cep_5fK5U?!1Dc|rqxI6pii%n<-pqg6S8N4 zK1^)X7E`AiT(iJ`8%0^`5b*--vs2$E*6>#9_sE*n)&(Na@;U9n+KzsmI$aPy(GE+z zUxnUTVu>XU-iQ|CpmSrq+g|pnDPFVmho26L<@$1XYCPc>@j2m_);gl4e#p3K=G|AU zw&XxN7qs6=8Q>S%Z?o>?8C{V}EyP;c5A>hJC@`9}o+j{nQ%h~0(+d8&FW$C(E&4$3 z;nrT_bxRHLD&Jn}q4QhIUPncsb*gCTg&eeQ5Q|CszFuC8uxt~RE&d`H-dN7>EY;C7 zL`}_KjMqyD;r%jp)!b0s&u~xxdETG6&p58j@kStp{UwjYS6@SJ14}D-v?cQU2T@pA zB?g1K$_nukVORCC7^U72&D5jfCFEGJzDT^S?iBOrzh7JW^851BMGvnnB2axtj0Js= zF-?&*uWN(pAILH5TcVnWI2a6aV7DD6gB8dL*lO8Twl76UK$OzSE(pTkJ zQdG3i>xeIOZ&5?-EcU32#CxiSEYC!SzsGaui)Pl#9_VR=-wNIjfPVEwKko*jKjoKl zbQvCXg*JtTh`@{XyaXpytXp#%CQPw0sTNP&;)b^ z9kk_Q5#!=I%|_ZLqJn;047YSfcD_X4E-sc@niKkprFsb0sV}m89`e07AA;{k!m5ZPAfueXcA)Zf>Fs`^8MrjZ%Li%pCI)tS)Y7R9vCOlGnNxx00-bFkA5X}to%T?lKNF1 z&}T|Ym^viS`&%gM#q*?Y`P>kF(^8r?l698l`46GgNo70yV+f6;&IrBsQjfKS=LmNa zItinoXDKD!cu2Szono&Ri-$G3t<;myRX0l=lYY8ENk`8?*Cl*ghUy%~LK#Lf&dFy$ zpTCCs7~`O97fG+bhK!{@(Z3ethHvJE!%UqSz4bqaCJ&+8<`2V#k8F3@7XNMNg-$N@ zV#y=2-ArCVH<$Xc)Y0>YQcsn7saY4PlZF_FO-LC_-~UG-AEX?~55-{|M&2$YJOy@{ za#!}7ze39J{E#v}|Ffj*$q(dF{^#T!OPT%*WZ!fc-+(P3g0fh_T#a$S91nJjne=IM 
ze2}48J~=i>^BRHmo2-EhVW{0%h!g@ zQqL%1d56Bg!uZy_j&5T1586rc9z48P^6X#dW0FT?yK=2Dd0jEvSnA?Z|3+sr^=@=) zb50@qww${#U!v__=2^*7?>759p{aMvK8emRc~m~r%u~(>85`vDrEdMCd@>#BDc_D~ z%EkP^oGVH>K>1|ZO!*}Hyws^>IdbQKrc9wMQXXhh|E7P?55JOf$(#qGf6Fv^y!_WN zkGIYFvz%8~V17;imU@;flbk>1$*H`unzDy-m~$8A8y+3nc<9|n^z-RJpO$l5^#43L zX3~^-jiiSca}7B^Fxx}UrSr+bygrmC*G&DC`IwxO<=yurZ)hfIA!%oZd9ox=ug?pa zr^va4q`N75{~9vCG3Ox6Ys|R^bLzbMK0h=fo*(}ldH$FB<=vN8zkd#S3!u62+do~G zv}64FmhgMp`isBH&wS_Y^u@-BeCN-~+r0Vje+vI9Hy0yEnJ?$foBt`yg%cEEp?AEKX=Ti zq5qf@Bke!3|C#n5$uH6lB*WbCRQ7W@Hs*!7W8>^6{VDI+QZFgw$&2U6F>mIRu`PG( zlFvndE+=J9UYOU1^Tr^{$((wo+^=K|vs5%f)R&A|dN{Idq)}FXWTctb(A}iGH)W_P z-_hsM?WLaoL~cv{U&`d%xq>q#GB+>_6y zUwk1al3rtTc;UjQ!-KLp9ewJ0rj~LZ<@sYZThnJNi-m=i?bR06#+8bmP`f1Fk>*DU z&tC$rRo#StKhFGROuMdK+lf}JqVsjY93kHETCH+H`A%Nbn^;0Q@t3|#@|hMtp8`6W zsj6nRcv)3V(=D9nyvLgRr>Y$2-d6rB7OPdYSosEc-kCRXs;=s~W~OFOa@AX&Xl$&f zbBV0U!)j5j^4#J@Vol>B&(w7~v2;xhKM#$9sFpf8gy^ zz{jGSk1)?T@;uv1#>q$dm`7eb%*X1(O(sobk>nvy2Hqqgt&B4deZ0(@C=Xwv^Rf77 zl2WRoKzl2{$t_y|qF6NDs`}6hw1byd0WU9EI_}YZWOd9Z@pRcFB-Nl4CAWGBA8zq9 zfq8|Utd#l3y^z`}OAYa5>21Cq} za+7W2xvH6jmicp!>O~tp`IA+XT=V3Q`zRA_tI8+IJIz0NT_(*tmlyN>nYlbU*K?ak zmdD)kC4LfTW+(4cd7h+|>l&y+>VQe}e@s<;(HEvq(E`=|%iQ|cKt zUd_}LdXJCht5w$KX$!Qa+6FCD+pnGBU8E>2Mz5mR(S!7^`cQqm{*gXg|4!em@8BB& z=k=?4qMprnq5Ukyc@eHi5KK=#$i~7IdU%|hUe>MNQ{`LKv_&4(p z^q=DYng1sLE&f0I@8JEQLjjh6;sGTCDg{&xs2wn{l3K}H$+uFmN@Xfls8pj;`$~f= zji?flrrtmPP|q<8`Fn-(PuAL>T5q7%zlbn#N?a6nz6fY2eo8T=s#0BfRq3e=Rz~I4 zdcCrpT0c-r<<|NnwZ2BJQI>LRJ&Rf|)Rt-AYdf^xvl^otB;}fMOgU#9Wym}N@Cr~^ zHB_(~DLcUy@Uv0U@G>kpf8?YnX_8nuBXRSeJe`adhI4JDU^x+}V)WEioXGcO4TlKx%?5Avs}P!jgnV3A1i}o$ygY zms?wI{(NiQt+lt7-&%HS&aHQE^}N;nMr!fB1%9_4P>(C8On#Szd-|8eAvIF)yHo1r zgX{Z?9Xe+M8 znR<-$;D7m3KT|{08R|^+b9ENh&5zYZ>SA??x>Q}Jel4(Ds^6%~)fMVWb(NSUzED@I z->PfWwdy+cJ26|#QMagD)t}XE>M!bcF;{%4hN?T%o$4+X%Y!(^Qx07K_AUHC@e6Gu13LTfMLTE|!R;>L2Q#>I3zm`bd4u zH^IKv3TcHkKdq=%Oe?N_s!mtGP}gfUw3=Eit+rM`E2tIGeCe6nwb>$6o1@Le&b@S 
zv>&t|ML1U4ll1k|yk{OE&Wdx|zeJ=suO(^QM3nZ6h}O0vwRUJb#bt3t#Av&cRLUU{B`Um=lT85U1^qr%BrgT=i=%4D-wJa@LyU&+wx+>iizF>^B z@2R|njGm_SQr=d2>k3i~l~t+$x>f0;^wmGsKhewT<&=I(e`SC&5UKX1GDt74&r`-L z6OeH<^Zt)BI>{E;{LT8odHT!^bZczss#7wUAd3{JpXwehR^d4_8&E5c&J{*47o63UY|Ptq2kTn zquKZD9T3{8PuVAjBl`4dQY%!K*>DG`=k+0jF}WEKno)Hg*9x`N?BpM+RqfWhd+*SY zz_OvO0{fH=2=EUI4eQoBG%T=eK%YLfLak5ANLfD}UCL8hubQFOS89cNdvbWQcWA4! zp`y>cc{0B@dshw!4VgEu>^y3acOLfa`95Boe;bYpTJt7J~cbN**l0L22jKTHAAZh)e0?GGxTMGLN#|(Q|9{5d$afcRzerU_IZoB zJ$vsL)iuY2K4n8IQ&|7G`~09=Zk()T;hLeX=I-+seS7bCSp=5d&#MWJK;jG42*rnZ zr{bs9P<9qo*zYacq;8e6e!`n2V5NQ2+5sgv+^hC(->ABIr1fa~T9uMAP9NB%S&h>2 z$ntv4N~L|}k#*vL;Cf}{k=K$B`ZuX8kG!Xk?AEx9Jo4!o^mTRnT#u3!mp5zRUQ>-Rn`22+lh2TSLP{ZH`(KGnTN≤FHXCuU9Qce=@vmZ{%9^5??OaVQJ8!4u8-w;yt zGSf}w{_*50pIMtH))x&$Q}axgpy4z3*ZNBi`SbQCA6bGSU<$bp;prp9IP*#G5;tDF z!#;m)YCm)K?3H1p9|vVdnN*ofj-$y*meTV~S;HwjPnNth*M{+|_sp6PC(Trj<o-- z)0y^aD}vzUH`&)RDUAL@>zjYZ7)trPKdc188#9mmW|KTF;z^`d=eQ4^87+tnf_oLX z<|Fnq*c5&#`g8WP*%a|5n}RN~oc&6Al_EB=Dg5gAHlDtNO@V6KmJIQvlwAHGw@pm{3FdkUYpWt5%3{x`IDO*x<(6{-@Y*oe8O++&}qWU|jz zve`dUa)g5K2xZ3dM444D)r-B4T9AEFwJ7_NYAN<*_=OQgtxh{2=T%Cs)>3P+udCK) zKS&+Mei~Y=q9$weg`$0hE~RKcqAMxrNr%`UVa-y}Zfej_OVI4>?`kRR)3glsSy&$x z{WI3P6{)GP|3arV(H_W6|3<&aK1xq#pQ%4!|A@DM6pS~8p}FJ}f&3AoCdWl+>7T@A zwk>R1*?ty3OPVuptHyN;e6tB&*d}K;oD06T`4ar82L^ZUDQXYJl?&T)Ka%M?X-b3(XHe;A)T=VtjsV@my z)bBo{u28Z{M754+9r1#AVQ8QlxU==e;ERFNUl!E;mZ43s+y|Abzq-vP&Nej&e7(TyYhKT3nAB!knQ?CV-d|IoHlsKZz>ejd0v*qZnEWM zyaz3~cR?P4LdUyH32NW^V$j0Y zw*v=LU-&9`TANnTX*HBtDK}YWPb-l_3zJSzMz(QlN)S9PuU*Y6l9F7Nn`~`K1(|1? 
zU~ID#Z&926wjl4UKqF_m+F%8(}EUCt_*4c-$w)_(JU|vpkgE*lb=qV=OX5Jof+kC-2Mq zli!8*{23oUEit(~iO;+K--pJ^|D4uC4U&eovo00jvRPvW`{?ni5PR|qbKld;9`93{2zA>&oJ$v5K zv%HqS)t{I8?{WOHZlZA)t~UADup4~!;lKSe!f5O7^HLi}pyoC9*Nu(FUgNCcG}gn% z#f+`SN#i!{j$g3+<4N>3NeiPEa0$uh=W?&De?E0RSD&{x?ipAVUy5~^x4l(`QOAEDYB=rPnEQV-g%NMm(u@D9-cICf!)l>lyh)F;(xf8 z7BpoPvj-yzT>5wE`JL4N;mVUf8ROLL|fvTu?iXDH`|WbFU>@Bbi2^K_s5&v+`kjNS5gSD*JMf8m(A-8EKA zt;YC4_Cxcpm^m&YTmSw?{~ni@pYbJknPWHU6fA_^%(hF27nno8$oDNvi!yxSxtu7E zW?WTN6V>^8&MVlIYhp2~jTK4S;p$c$FXt+G-10ZYfntV-ouOJjX%wmL_h ztA44@Q|GG-)UVWq`PbH-uB)lPs{7R6)Pw3_HB3FGo={J!r_~7coO)ips7CRnju=*s zW7RnIrkbGMW-Tq5b+o&zprx{Umd>hKHfv=MSSQP2b(!~yc%{Re6|q9Bi21QvRzfSO zmD0*+<+KW#zg9_mNvon&(_Uuntd=(0vr@(i{CsmYdXZh4!|X?Ol-eejQDr5!(y2a!Yt5NCd+j9YjYgE}hx@ zMOU@}(VefKRKjlA3$E$SRteibvQ*b1?Rd!NzA1 z0RM~S5w4Y_%&7BgJ)eL1-^YOoT{N3;!jMY$N~8-m3Rox z77>r&TR-s_-Yq6_;8}lRz_(Qt0pC_rba?k=#iG^G8nN=(R%@sDXN^9e^@k#~lL+wK)04|@eRN`HWwMuo?ch@V8v`yM3r7^b1AC)HB7Hx~tRD-vb zW^yf8X#sEVS6ac*hm>~MC{HLIwUgQ@r5o?2oKt%6c1pC;3+}$6^oGN)Dt+McIHj+4 zOG{D)vO;T9hRan~Wdxk>Rz|Xd`cN6AL-kPQI`5S1 zRATkr`fth&{h)qOxvd}3k0^KaWBN%YNk6ThR_^K%`dQ^3Z=PIGoLE|;lvLh9xu&G) z@p`=C(jB@(aqIW=drG>_rex^ZdN!8if}#;m?+V@5cl%2}}XA z!7bGDG zSPRyH@4$Mn!AMazg73j5@B`Qieg@mXFJL^2;{TkTMPCI7wTvyX5; z;ctWo2oG`35pWb72jSopfDYl404*nqGh&u9$zoqK#0QuLOfHE0WhKqnx7 zpR_CJ4&Zqmp4Z<7eLz1j01N^{z%b*UJ_3vcqrtmi92gHKQm4saD)@jtw~8{rkNO(! zTMssoW-}o?$NTdo^y8!p2WO2iJqp~z_D~<0aYZ!2=Ghb-v^jHuD9&SyD@tkJKx<4G z1X4s1#<(JkXGPRiMm#d*3NqyiGUbZq&sUx+5%&_PCyHpK(Ov@$!0VtPXau@|H$Yb~ zpR}vNT5yy+qRHb5xC*Wlch5-DGK_eA0+UL}!ed5~egd523z%m`5j_gr zW3E@8+JsV@RBCdOnuJo5P->D&O+u+jD5bwh>G>oeCT7$MA2=oE{zyL4=tO1+BR_fgfDmkH& z6Dm2Ok`pR9p^y^_IiZjf3OS*W6AC$@kP`|yp^y^_IiZjf3OS*W6KXi2h7)Qyp@tJ` zIH86UYB-^W6KXi2h7)Qyp@tJ`IH86UYB-^W6KXi2h7)Qyp@tJ`IH86UYB-^Q6ACz? 
zfD;Nhp@0(#IH7penziS8uoKV0C1)Na8 z$vFNdH7LcCIb&>bGPXDwTbzt7PW4al5Ip9|)CxK5MBX})kxt~J6M5uB9yyUmPUI24 zMhBoE^2mt{aw12Zk}r)}jNr2vC1){8&SI3D#V9$;5vR?>o?FV>O);1IZPWHWMSDzl6X8H|vbjF6d(PMM4_nT#Bnj2fAY8kyP+BSVV=w?GQ#nMNj~KqjL=CZj+mQZo~& znTgcQL?UJ)5i^Pj? z=m2Je#b60o3dmFa8XN?NK^Qm&E&^(xVlPr-z#rfNctj0qfq7sBfHE5QXsN(W=?j9w zz!!7`oxvNR8|VRgf?l9EfVz5rFc1s|-+_J9trfMmQF|M;w^4f=wYO1w8@0Dldz;v2 z9D@RWP{0NSY*4@k1#D2j1_f+Tzy<|uP{0NSY*4@k1#D2j1_f+Tzy<|uP{0NSY*4@k z1#D2j1_f+Tzy<|uP{0NSY*4@k1#D2j1_f+Tzy<|uP{0NSY*4@k1#D2j1_f+Tzy<|u zP{0NSY*4@k1#D2j1_f+Tzy<|uP{0NSY*4@k1#D2j1_f+rq*WLJn_-`BZbYMFCZS^{ zi4(?c5o<)DMYNkTtKLN`f5FG)ftNkShbm)G(47Mv7DPr@}@g{kj7Ex&r;W0{yxIHH@T&k<>7f8b(sXNNN~K4I`;tB(;mA zc9GO7k{U%)i%3czNvR_#btEN@q{NYwIFb@aQsPKT97%~IDRCqvj-Ss)wyPI~&Lpl^zYAcuZ0fM&!g7Q#|S3{oqRQ6`R2CQd=7D6fNNpap0{ z+?#}Nfwuwukx?j)Q7Dd4D2`Dmj!`I%Q7Dd4D2`DmPThhW*lNTe+hdUJG0659WP6Mn zYQ!^dh-cmq&%7ZXshEgVOk~uGW7LXc)QV%&ieuD@W7LXc)QV%&ic=%O1#k&OgDc=F z$ly8DQ@sy<2Y>R6hv2ah!~7tg`9VDMgLvi#@yrk6nIFU>ffE_+;P1&JKnh_@4_aPH(d9b|HB7;%i8 zak>hS!+HTw$cSMSjnn-IOBjhr`9!@GD95$(#5V+uK~vBiv;?g|TMz^~abFkE6?6yi z7UN$G<6jJ;ZXBa-9HVX=V_^&ho4CH2y1@q58>(uKpLpD_N#GXBNF@oqTY4ad9TcsCsGhQnQOcpMxa2Zy`i za5o(8g2UZ#cpO~qr04j;(f8nJHyrJTqffxmE;!l_$GYK8H(VJ9SGwU!H(cn3>)ddi z8?JLR#>O(n#xlmnGRDR-#>T>lZaC2mC%WN8H=O8(<6Ll@3yyQaaV|J64vve1G$BxG4^9a=}S1ILQt7xZ$2SxW^6G#KARj+6^O?F+Y|uKNjwB!#Qzq4sX(fOpXma z#}%N0BA^7VP!f~^xGyVe1HiW& zA0j-4+>#?Mx*DzNPiv;rg5k8(dRl5dEw!GOT2D)@r&ZEvm2_GqomNSwRl;eNaHyLO zbvy6I3i9qNWd*>ory4rSA!Y&sMThnnF~GaPEJhnnl5=6Wa@4h6%ZSU405 zhhpK>KAqa9Q~PvkpHA)5seL-NPp9_b)IOZrhg0)(Y8+0D!1?_X)*d;d_9HBTM?y(Z3j8-S>`vU%1#|`70er?Nc#l!=9{l`t_C1JmIRl?a zzLUXJAZO$2iO-#p>&J-;2WR;r#RV)R7x_v}6vsCS6Tv->(}~Xl+1PFVBz(Z^O7;Ty zOh8Y5hc8v6nR?(BT6_~cMc$AW`y+3GIJ=lKB$=$6S`8!sXv93E z33_NVN+RbY-C4En0eS*vFqGECSYe~2Qm?%vqDUVDc(S+&B@)poY~1hUS{iBHgy|p? 
zWPxmOpEdbE2p@olAji0b9(xHL_LAabBx5tX%y?r{{Ea*4u$P$sNNTvyWiMe%i$Zpt zP?~Yv0<`5?AlHJ3Z-?&N0lZ24Ti|WbA3$B&!leu)-(g6S;Yjkaq#tLTVN8o)Op9Pl zi(pKPU`&fpL2W~k^+GDgzIB}=I8R8?*D$a3y2{8NAIY-MR%r;xt#T+dQesZCcUP33mq!))HOE9y1fv_aui-e^JOQY|VLM518wHWa^HW%z4{N((VC!!4a+<z|>8Z_$#ew4#evWaWdeXEh-%h_JJ9*KCDxd>w8#dfXnUP{cS4MGr&K z!&+s|e*{N4K0_DK!EkIDX=PU&s%m;)Yjz*Dk+ zr^8cr_$d{BvcXRm;T=1*u~8cvwXwk;c6h@MZ`k1tJ9F<0c*71)q)|f~JR#*mI&E*K zjqS9rowk*7BAvFCvLc-}O`|>2Xv;L((Lo#9X+Jw{XQ$RS+RZ_m*{QvacCyn(c069H zputt6b~QkK&;T?t5}-%|)VL)+=X@Dh33ifp7vXNgD6V5&W$oJ!N+s~7L0 zZB}PZoyHv;HU&s5=xv> z0blxnFMYrl3fkaxUwGXYUibZrWRjzn4c_&IqBi)}7pmIuj4uLJ8MV;yxYr}4h`UbQ zZO-ozW&rdjc-9x5_0=_Wsp9BeFMt<8X}-e8$~S!L3*Y*}x4!VLFMR6@-}=J0zVNLt zeCrF}`og!q@U1VcBS$7VD#;N^O15Nu8u?(yg%5pmC7c|EY`hWH2-{gRsNNhKV-&}i zIgTN`ZrVVe3uMmli<5#Wdhnec1x&r z2j05_@7;m-lAxBfqe>fU61GcTDE=7q_ zplK#FO`$Y$^yZxf*6ml(&sf_g*A#M1fhL*IBomrsLX%ABA^TwpbjXAbvae-Q=S=FH zNu5)ub0$wn;eD&_jL$tlPtYHm(NRJtkh9Y?&fOfR1IBxi1+u|?uKhvy06YXa=oY+} zkJjiy*T_Q8NJm?AF><@n6kUwmF0@1!TB3{5+l^M}LMwEk6}r$0U5wstMr;?Fo(m1n zg@)%s!*ikGxzOBPXlgDrH5a3^8!gR+mgYhmbD@p77@ggW%x<(UmtFvr1D}AE0DT6n z$t5a_rz=a@Xi3>=dmp;GGDqKj6IxX4xssvQwC4r!bRtGm}nX7M;Q@+RZFlzDMFVErZeu zm}ddZZ~gPU8{%e`obq2;{{H>@8c(f)p7%A}%!b{}hEw!Biy!adFxyRGwwuCCH--0G zt;|#l0_=*kaws;jP;6qMyjNKm8xJ4LV0Ggy!lyQ#P_c^h1HfbB31!`8E3F@ftt=E9 z?-ly&7G%?Q@jGuhXh1i%E9OcB_MUR^Sq0+!vFB7GYy-M;eK6q=FpM-Kz`GnzAbuA1 zo-e>0Fc*9Y{=c>Zp*g7E0;r{;J*c!KZ%w(?^UX>0`FgH7NE@VwQJJR8wo?%5CM z5$Yk*@@57$z)HqlMNg!zkkjbcS(8XEU#C@{(FaHhE=}S2lTNlb6&KvdJr(yt1*_ zc;1`$7u|U44G&R&0o%c!;30TyglHCHvE~JQ zKtWI#Y$AROI0+(+#q{J5dT|K7ECj1=2v*$?thymsbwl*Gz}sLDSP!-de|Jc9-1J$t{`OlF2QZ+>*&HncVJ@TQXnJ z@RTb}K9>(=#PEIo8}N=IDTx09Z-I!`@YE1A%Pwe9FY?~XW!6O8tfr;1zEo61FlUx) zqLJiwp7LF1j(CsR$$jH0Eqj%gjb`2#!`y(izE3WA<}o%C1?#34_LXv2uWB3dN?rOmwjZSxVAYRj#43Y~ zL&|8QbDikD@+Bgkcn?PF~ch2)&^wA zw<45s4E5_k{R&aimP(hLbm$nS49aoQV_v1l45GdTsIP^R*M+86@Fpq1j7PrnSj4zs zzW3OXRjyvF1@tDA?>mlz!k_Sl$PCs?XY)P8FVTGV@qXt)u6y3^IEnX2VOsgFI1cqs 
zVmUiy{6-D$(%NTf?O0m-l=2m=d&4+O3!bF~BjJzAP`5V{s2{et)zEt%e6b&jdU)kD z{85}12!{p{)a^T3pgVQ@n7T#Ke(}^TgVvZzYedi*U8&OoC5HEDlHpG;?s7w^bf}cc z-RV#u6AIiVRWx6}>B#f?G4qXqKYb{xTo>HOJ@>fBZO){c&}XEroHarZUnu;Ni?lt! zZT+CbK1vpWC*eIjzwaCSxo;<}62-lzdERxHl5f^WZrfBep304RR`|xNFSEgkfDr1!{K7)=q9&@nWgf^=cfp5^BrxG zEl%)W|0$?=-S~<6M8M&9o>}z_rDUE}Pbh$OrZ+xi{owxkLZ`)h8G9(tDKRc*A1z*; zT)WWX3u*BU)b~I2&n0B}a zC2qmJ3A9%N(u&!i&}r#Y9A$7<26yqbA7c2Dk6h)YO;kg*!5HV+DvSPn9qJ|12dtXu16CdH zuifNyh^>~8Uwf);e&?wU7QHKYh4C|>@Ix$HeG!L6?=}3sZ1@^F*;??8#xzzGerIci z@7E(}lp_q$Mt+HrUm%y>U%{sLS3A@DtGy!qzB-wHUvHRxU)@c=uO6n~*PEu_7hYfZ zec|G#zKzpr_GA#I_uNDMRmzDAfHU+cCYv5#Q%#Sr5AgWd3}eM*9q~1>CN@4^f44+rZ?Ad)0^vr z>CF{xdUKt`=P+5EGJU#En?7AC<)2^y!K;eY#>zpROCGPgk7j(-m*} zblo(4x^Cf*a0wdKEN%y$YGWUd2pbuS%w`S7p=J>m}3ItBUFCRaJg% z7=JM7c%)4waw^zRBX{kwuq|E`Xve^)otzpFd`T@RHvP0y}D zrf1g>)3a-Y>De{X^z0gCdUlOBJ-b$#o?WX<&#r&zL-nD`Px^3uxU$*w^4em0d4-x@ zUOP-LuU)2>*KX6x>sQmuYp?0$wa@hOI%Ilz9X7qZ!b~r(qo$YF3De6f-1PD~ZF+f~ zF}=JZOfN51@bL1A(vRZh6^)nISvyGK) zh0Il^nEkV~**{C0{WHMqp8@pGD)iN=Y~}F4tj4%iolT*yy^Q>=%~k+Cp$>P}Wh+Jh z;#*<#uh;PGlwO`iun0CLO%t~Ac!f2^7q}Ul^bSMQr}wo$N(Zr7@dFJ;+O%VP(d_@; zX8+ge|2>cqZ?b9lYW5_J^a_2!lm$M>g8q#01K50!3Ii#<^bPeg<${7-*um^zC!31b z@Gi=>n@u$(g^wvIbW>7znUccalo7>{5I3oJ5}QAA!p2@oiZZ68zz>Yg%ajxqkrX+i zBGSSTUZ%XLXv&Ld`U}~azsIn^yjVttQ{0n2I5Ov!t^CAt;|+tivZK3 zw6ro0DHC8ynE+GDR5qndVWdogcu7fA5=8}MO$wuoQ*ok;^0p1f($lnpDR(NEa;LB< zcYIB`Q{0q0^80Te^ZhsJX<7ui^Mz3HiJmQr;}<RBz3?s-o10swpN)$y9 zkSHoB5*=2DRX~HXh=_oK#)rN<#034gA`*j3UU-`5%rN8oJf8}P3b=qED(<3wqM}h; z;ub<=f8SGm@7$RLOybLb?{$Cr^y%8Vx~lqAb^ZF(L0kF}(pl!05pp?rhW9J{3iwCC zHN0QxSHeGr_G(lAIA5j3L5H=eU&YsHanNLK>eujb@n_tiRem9`mX_ES8+;5Rvu%fa6o17`uYrd5` zE@Hjr2I!yvl8}pKF}j|2$Q?ZCopL9!?gE=?!ivqmf?py_A~h+`(&}jR@!P$^iajbuKX^TN-yj>%DtwOFjEroQjZrIl zl$UXT1?|;CtY3kmHw@j34r`IT#XnXB@;0+&E$L(4g}yKE^F&*~*t~oo%+<2q^oQgP z-D1-GR6dOkWmV_Th=qnRIsBU0l4A6Xzacd=O>qAcm|PRtF5Ah;w_tN#cE}FW*$GDH zWtZ%N-z$4bb04!T8G0|)*ksLSiJGu(v!bQfKpA^!krrW#1x;mW6K#SOz^0&k9km&A za|PNQl&_<w*l?*+7@duv6h1Rc|A-IgKr1==e0dnZer~K3g}to 
zza#wNpn+bG&?DeGfeL#4f&KvgNYFvAN9j@Uok0n`9<4{i9|KzGu@BG%{#Z~$uU)k( zd^ha|f1Dl%-(9=I_s|~jJ+)_)(&L#o%4jd`1^+|lk1~3Ko&bL$^GF%(t-aw-(v#p> zyb%5r=9MzqNBh9{)xPkj>Z$OjG0&9Ie%cTIbmp5f^y_7m)c_qpDGk(t@Pl*^{9qjn zKSYPX55*3W)3fv}_;M|WAExwBI$VdtkI)hDBXuPFC>;fl{x_6SLR4s@1n5k(oQw1l zW+4moQna0mbQXWV73gfVo{RJ{{(>v8F$DPQm3n2=jy1Qhidu#BGgYtFtC?Aur}J<} zYn*viw8n|GSQkgCcYvsRy;B)?>s@*m&-H8lH91+LOUMIyI3=!X`)#UMYB9A(TDY6+?VMx=yF|7$Q8N*&Fn|@QBpwToHQAWklQu7 zhNpWHo$W*QDdtcM^l5ar4^{ryg{v44y{dem6g>w4xsH|PdZdk&3D>}l$Y_Fo^G zmyK-f!YZ5Sreix3-E??#({V>PoiOO8^CjST!avdf=e9i8ihj9O&iOmf_GRL;A(C_Z z+VFO5hiOn2GJU{YH^_!*9L_Bnk=a%n z=wA{`wK<2C-XVdS3rj_?CWyJ&W;Eek2xD&flBl0!^D|$i_7Yq7RuUFEUp65~AW5qE z>&Dr)&8I6NPSRp>F|3Si%jYPGowy!|wxxyJDu>Nz z=SJLO<)Q!Ms8R=aAus1Sy6!HxFJ6aH?h-Wit-l+Dll=G!|5cQwm_t$zdw5epb z`7GOuvoUh+mSS$(pIgo^e@;Rr*O&$hd(!@M_poyZ?u2ceu;nBkxLTd-y1i8LOhw2Z zduppOD_fJ+wq{cz(NpmC<#$pu?Wp^@TauQigd5W?Ofo0ETmiDsu5xlgNY_d^cUWng zl(NmJgf?YY7Rxr1(1~nLWTC~dRUWNB$hAKUmuqi{{7qbvwvx0EOT(TkqNgdSJrWsh z{c$d>zxSNG-+|9b9&+t-|L6QWlgnkUT_!bQWuHHiwrTCt${^pzBqf#HFGuUHEmv1c zosl0glX>NK>Gz+hV!Lv z=nV56In(I4Sr<>3>CT@u>)5Vt3bdP>4n5AzhIV&zpgr7NXiqmEdc3=Fw#8b=d`;{c zVkMvxb3VOXA9n_pc1An1EjH6#;pV#Q+yb}QEpZiCR$J~?yJy`d=54B|j!jXz@0im| z{ruC)heP|9_bqk12Mrlm>PHXmJF?UtSw3VC{P6Mt_)<2x+D+=$QC8+=eW36j7KhQ8 z3oS~KTLs~;)?mDav^UN1RvWdpXzz+q8=*L)j!}~}w#oq8g*DYYfFEshf(&ek?vd~o)9PWF67_B=1^KliWB;STZ7 zYQrVhbF%+oia#B%E)TF9jQl91`Y5!U3S$qh>tgE!RDccuK7p&R@|p>2fl_vJeE z9l7@L@Mx#|*k9saq7}dBUU1L5jqW+O!L4`CGNahdcV%|5ll#d1-hJpka9fy ze#ac;JML}P!P`vRf5W}*UURRySFmfb)qUb;`OExlf4RTZ|HPML<>E7DF8|=Za9?7R zkCn=>c(Dz87u(&R-M4Os+v#>;_hOIR%K*sfUW_?nbJb%t6WbRVEMFA*2L42U5ObZ4 z{K5JyRxq0SX1+OgFp9D7*AmNyt$iDRsLhWtJJwB(lkU<(ddl(AOMWOP$cfThPLh-5 z6zL;<l43)E_T!t~1HbO?qC>bqh%NQ9eF}%RIS8ekMO>z-C|48})hYD!!?eM{fg zclfIQoxZE@VR>Y|*o#dVs z>)uFeT;HM2zp^LD*8EP(Z?49%OBscH+v;`ut@*kwWxuVUm=L)K$D88U;}7Gl@#pb3eDEmN zV~f(w(yi0&(#NEGr~9PK(&g#V>B;G7>DlSI>G|pF(>JFVrf*N*lYSumXnJk>Wu*E< zq^?g-j$+m~&0yoWFtl-Vr|&CbPT#m9knS|kU-VTZ>GA1_=_y=~ci6vy&Ipu0iqhl3 
zF*;*Q$EQca9pk&e4F?@L9aN>iKf|BtFYr_Rg?_5P$WQYZ<690wFx}7aGvJ4^hT<51 zt{?43_)&hOAB)x1bNo0z#FzO2exM)Z2lKt(+7I)S2z@ehy^H)zy-9D@TXccm>L=?$ zy^U~#2|0jRlcH9B7~#tZT}s+xNiCoH*`z#%xr@5%_uA^c%?RCwv~Sgg`P51KwlJzF zVRUf_VOkNgH8X*Wc=i%^HEn=1$1$NFv#%JRZXFf6b#ASD#<%sQ{xILpxAz@9IAhv_ zwVWKy!P3=ie~Rzp`}$M;X}%vN{{O2A-|>HNRbjKcRR}iRA58cRFyS)(h#km3vV)@` z{OM@8@G$V;5um}N_~Ykn5aF?)!RLSi|2)E$DOSR+k8X%=iWWo*`GA$x!OiD2NJ}==(a}n_tf@r3}HJ4CQ)u5BhK{in`@8DxJmUqAex3Z%4 zHc&hV+J}ua{I(1Em)+{Kjlo$59BH~ z3O+Sb$ALCE9Rp$mr&Dw~v8IAMT@E@lTjvmRj?UHjdLuo=V%!$!?S!~p?*<`Cffn7z zzb6lZ_4q(tKz$x1#m6|FAjE3WoQ;gr-h_uTru-0uCS{b7<96K*4^4UMykUf8_Ef`E zvs9}T{eOz5;#o6dGd_m(uLOyIo)PnCt9^GR_uQKTf54JC-(TV2doV>gun!1yWuREq zqn&g?BG`B65&WHU6#t;~$jXcXSqWh?OlL*|{8Yk%hSEz#ezKoTe>2n11mn7i9*Yqy zd*V03pNr(NuU6|>Yx`MxPkLEm*6ClZcO|wVCCR*dYkELOoFTNuVf3^a`t5W1&tVch zZ_|+Hri2!^$StC6KjYRBx-I>m&2QQrEpKz-D-CsxYS04ZJw-(qz+Vfk03jtV*r>(53tFwefL8j~L#%4}8h@j?=ox{(0a~HF z58UxBXqEm7%4*ZlO8;?)W&ab`_)mh%XTin(v8j>w%tapxzF!C5KL+1N%te0*t?;)% zt7uIURgvDU7K(Oj=`ApS5LdhNZO|%kOS|(5Xrq__<{SWgME4fN34DOGc zJ8KYIm{lRnn(TdUw@|zhb{~6w{?>vn<a@nViJI7q$NdF-r7VZ__w7)-pIos=u&Sjph}lOtHFKkj{A%X?;!cN zwpfGgz*kbiy+LqC7IODSfnROBw;WdLguq)%s!+zR)J1b>mHq}=4O(oeJ!LLJOC|g| zbCFk}75<^%a!GKpHd+C8Zl!b?v`P+#R)Q{D_@j&$M&HoRf%3NjIk)n!^!652=#QXF z{oBwg{dZ`!j)qq1cxVk0)WVE|F4c3PReCnG5?vJY9Sf~MH^qE^46V}1&`PAO`A&gW z=v3%Zod&Ja3!s%c16rdOLM!wl=u&0ekMD1xmF@}JY*{qOS&0sEtHRY#XKiJuTL~?5 zYoMc@m8Rviab$neM##RS=_$ML|6K-43nwxMWmYs;7ai%>0=YSeQTh>4OE!y6D(e~9gNh|HDcT2Km%d%{B*~Y!d6=Q>KngQDw7r-_) z)iws(fB~5n>Pz4e5=cTwBk+=t7f47*LI7(=|8r+n>lGzmzW4pU?+=nZd*;rad(S=n zo_m%+1VI!MI|zyxXqeqyUH^&seFAmdLJ;JsS+hO$E*GWZ;U_PUjWD@F&RH~$xbN>l_von5_Z-TK25=duXo{ystIpIyCnboG*~TXw_! 
zVrb7JNTQPcb;o~HeG(~gO8|I6P)o9P?4=fCPen@^IP zk~acw&cbs8@SKux5K)3yrw@g~p^&xIVzpSTLFPj^#C*_;^b(0wBH%o~xkznc3vBYYZy|#domRih^yqcoy&z@Z%v%127qj#b{tg4sFHE$l*@F zk6UpTPUJYv=4DMaBWAO=sHmWzsK{qFjaD};Gn<_`6ZtKbm1kEhxIJ1nTd7ha1b=3i zMGy2YeYmQ!9X(mS#_n*JSj^T#kl9?~D=Ar9z2t0VtR~=cZEb8k-uXu7 z@y5n2E?1x?R&g5E>~6sQYQVjiC`n@;MiUG&qlE!m^->nccn)G_ju`dEyHV%Ti;cA_ zO-3`an(a5uk00))&pmsvZoQ{^fl@3+sJK`J_(~Kh0){zK7)FJKJ`nzl{`T{JGqCtCRt9NJcfR9 z{Hu;*B!_?}UVtZJ@I=m)CjtWM=gE7?mnW}#hTM4!eFlxmow)7D)W@7YSgjg@7^U=L zJ;lrh&yoH4k5^18VOFePC@Y_%!t18hYI-Xw4)@N#x8C0R0x3C4Pw4IW`Pm4$ES8Z-WIX(EXniCyYO%OV zES6B^1xA4$egp8AQ*|!+ z@S~5C-Q4rZ+sL(mfs@I8jDts^y@qW+TMQT$vp9Yfb<+=1tLgjDLhkvKm-nAI!C7?@ zw;uuumq2?Z;8(!lD@EZ(hJ0Dt4GXCGq}60zQXO9mR7X9V(_9&QXl(T1s;VZEdww#n za+Sm3b671)Vx#v(%PQ+CV`oOGf8zcz@81vo=R^M}fIwvMm?@BX=;;rVAE4w1boZ^# zJxAW3?4ttYw&eewC;tm%$0zvN*WlR(c$N$6iRT_f+b5`Nk8wpn9r*qoaK9Dq7ee0{ zEfk0(3jPJ5u=)2-6#eTeR8aK9AEpQ_jmSE(CCR1u$rH(E0B0aV5wRDZuY~7;wg9&j zf<8ouQ~3;)eB;??C^E=BfB7X&Ludv8tGz}*bLnafJ zAPDAH=rx&!;w+>vpWO9e`-$e}YrWoZ*z3Etx%r;<2iuQN1H3x`@cF6IwP7GXtJ`h0 zj77q01E&IOBH>Z1#qF|KN5hddFjLHRn*lR?LJKox<#mvi*Lpy(fR)lIsf;!u?a;aE z>bAD(>T^T1Z31cID{NMm%Vt|1CmT^&IDB&H(tE?i{7!& zRHH?@?UzB~Dilv)^m@(a#dWnnMx&)Mw+iF`9E!zk)z6khT3-~Q2 zj4)#q2qR`n87L0U)O_SVqoHP-I#p`7FAdeMb2=kA&rxmF%|BQ&@6o!td7afY_jdYLdAyaNmaU_~>ZKNo zJNtREtG4!h|J*Y*HM3_|C+=wruJd|hXm!Orm0D>wo0irtx-(i`@AG+Y9~!(P5N?nc z*)*DYRb@RIjm8d3R5fyMG+Gr51nyY0Xivc3AQw2ax;~g)E8uk(;FX8z2xK47DreW_ z@1vKV;0li)|LQMXA?Eu%fP-pirG{2UR!O9=KpCkFu@~gTAE!u*$-KOwVTH+TMV#{_ zs;{hk^r|I~R#i2E;Ikw)xjo^C+r244{bJHG_IPzoYgF>P;;^pP022K^Uh1K93hqf@hbzo9SMFEkK;#pxh_|I3*KbDt! z;kgUvmS*K$IC(xVYw3ATUS;g^KVSrsigM~>_*+JSl}OBke)d8?Siiz6vB1SLms~@? 
z{vLXWZvQ2!LDfH}XQ79FL5-8;Nq5pf)+aBKzaig-r(^Im=6V=0ei{Y=k1n7uzJp$+ z&Qj>rx5=N9znol^{D9O_`=R|>Xdj05AU{Wu70`{OAfxNZcge|7^83jWl6v_V=lWZZ zU%qA^$ZL`~3V083+KC{Z?_dA~Xi&fim6QYxWYC&e%mP_5OQ|7=#LSpqtRfEtSP4Ll zzbGuOj+Pzo8@(qSDMy#|KrG?%e!brG`kJQ_iH4xhyJv22k;7i6q6+9&LLdZ`T8(4x zz*&C8VqL^{Z4RM-Ex6I`az$M3wdIB?sZ^y=s@i?sPN(bo`L*MY62C+w@~aG0n^baH zt<6y_Gw6S*D~EP?Uguz5HCY7kGFTpoH20)th1Fd+gQ^&*`}mow0e!V(~lA=>W$~Rtc*@YtSTpgA^LTGi4UYPoBjGxWUO&&*wJ%s2>uGLzWtn67 z%grr4<$mAJMpv6kt;4v+ysH9SOR+sW`^wv;p!NmR=C}09c)_M&d3#S~By!Kt$e~d7 znTg>3!Qm5;Xl%k@GOlW0u*GEBvaoHX(PSVk;`KjhZk|(Hy*7(TzEzp%YHE7pHu0>z z;ZVH0n$B&QQxyu`hR1OOjH3>4WFWx%ssjt(I{hMRP|uP;ih$WMiWuC9#jGwRZ#n*M zXiea*;g+kNB|aX-jh2<28((;#u6|ByZS4!a4T}v1OY%})*k&6k-rm(y-4XG*H_hVB zUh`68LuYG4<6Ao&Yra=s-%h<=>Ts;{=d`S|S?yT19RZ$PmQ_LkvIfCW#ACs>EEeEl zMxalR3bX4=m(WSN-)PVcR#mRBOB8;&rQJ4gG#rW1KSK`c;k~W3V>+!?TzDrhkITye zui@B|*1a(MTj;qQHN(lXW~>5`F>4g0JnsL=|Oa4=&>vH^^*7YE8x}ON+%31I;&Vm}z6*3xigYjfHd3wb?rKLX5(c&3!7A^gTZgN5T<4zrdzY*bDeYoU)Ti-iZ^`T}b7ssBp<0h2Ulu6gi;pcjy+X2l`eM zKqv{&jhX6y=0c_42gHI(&{C_x&{taMQz%OX1#-7kRi+&|77j(<2}PLyLh#nffwrYO zof46VOC90xNRo_(LnoGS5=iZe#ayncDomZ{VSZNuCco|tpwG~)AJaVaC5%u8tEKz~Lr`NcO_(kU_A)oKnYd!oT{&|x@+ZzFOBC!ADJanx`EQ`Z)dSzYiJIhO1hvJ-~`GGzJ;&IAFU1@QoupSRS; ztDai7_Q`lvZEaN?Usc6xCt!GM+uGJy@i49HTHDsc1!E^Z=PHF&D`c zFwh^Y7jJ%}si{jY{U8O%a+_1iIhzk%`nuGCauOND)#F%jU{cXOeW*BJS!Fz z+T=-^`-^2DyJWJI?AjLzSBpiWfHwIjeOM$Y1{CiD%0ggiaeOdVG4KL}rmWT(3;`;E zTDM`M8fBlqBn+GFD{F&;A-QT5r+x8!<^IdBkx%+NRXTlM{>6N7vJqqHU<(dF|AgCvR5)u`eC6XY_>^9&HJ2%ew zV-RY<;t`K6Wq4W{?qiuCh_xy?a*6&OQU%@aZH*=Ktk!C&LJ-tw;~IODN~J?b=@D+6 z{%UXXiIH2KE_-%1Ph7Z@lgH)qisiDdDiV|^yz>cor2L17>Ru4LlV2Fi#R|2`XjtDv?oMvo{757coZkU`55c;RLf^ANNJ?_=3LqmvYLI?Pt!Ak0smhzMFgu&uAqhdCR`xf2^QC{24kx5B(gpNy*RXA#~t9 z@-*6?{NLoSF|J4>DTSwN=@!mz7zd6;F)`CHV%;XhOXD9D72-Cnb~wsKr1>rSe*<3n z-M7%&&ACRIyvkDOlWT~Z?g)iqjj`B$eYGpC7Khzv><>9-yFDu#l7E5y1&IHALVXFN0Pj5^ zPHE)~)CG+hteBE6j7rbc3m{!WDLWTL4f`~@eplT(yUk@anZ~NBaJXrrsj~9)z|iT+ z%6h3v-QQ5&4e_9OS=qfqqYspqCoTa72g_sqI=unB{$=%)?~qQfAFiz%H5l|Jy?#-2 
z;81C42rQf9WBxI>yICr4DRJ~VM(={9uL^|^%#QZy^+v5$H<$>+l?7|I7QpdhSchCD zP6@$DCM*#EbVFG6=&Q-~nCnZ?q>y)bX?Hu}Vq^R9YlR%35_ zZOs#7^Uv1S&X$UbgW7>7tE*aQNyOvX9B_k-bk-bXK1XdJUujUs;aWdsM9ZqKs>-3I&3$Y$6|3z(+qXTAVz67 zfNj|u49(Z+!M)J+ghIV~otabGe=raL4=`}BKQv@_)W~FwF87Mq$X#Fthl8be4ab(d z-3>CnP3qmAytiuDU@(~shT*E%nAv2r!E4CHkv`m|Ubi3`T?q7T)*JdT6&#$J0-cHD zwh54=MqpiFLE#KrG7~dlHe4gh4B@1_Vg`$7TqJtpa>Itk>gvb?t5=_nFm!Nid8FTJ zt(0-SbONhw@zajl*`2j@Pc80&;A6M6 zxHzaG`_8t+YS6FBz22?y#%*p_X{0oGWU#^m0R=7CN7s>elAm)QWvBp4$*_R@Tu2{( zm%Nh_kMD>kAA&X&Jw(vZyR&O_&2106|Kv>K#0=HG{QGugN|7|&p zqK4c{-NU`~|6EFvg>zhW#Sl>93!^4uRKyn^4>;`0qVd%ZyQf$z44afrzD!UP@2I#(wU(_ zW;wzVi8Yh1rbC1I4;1D*%$8+|y0tdD3!N15MdfC#8}N^AZQJT7af1`JsKVQ>kr)f8 zydM|?{QP{OP+aG{CEF@jl#?&Idc3Htb_oa`gH}5?kS{d@q6Fa1=}TcYJ<-zHYS`|S zi3*KEg>RH!l&33}Sc-&_6+%;CkpRz(Pd-LH3o~Q9u0$q7%;1#o_PP+?>vC@In01ZQ z>G3#S+h@U*%X^pjg9U0T+*JJ2S4b{q!ST)OG6s-F&Gi`FHm9pQc_1AKTAwY3+9^pRC=ybu9byUJt&v;##Zs%$I9L{1Xf)bxEzIY8)ml$cL7_+}@Ms@lW{C-Qa)FfCFM%a46E`o zLW$x`>F^yJ2f!mU_YR7|N-SnI5=sS0L9C~&PijZ`LZiW5;O1MTvT_S-Oo+opYKA={Z--tV5D7};j;rW^ zDJ&F;KqSry#!!KVU+f^)(of}N=c=UwpW0kj4F8D=3e>qb=E-vN3Xxt}>{Y1Zrn!Hb zbCb^-lt}V*BC#>QSX5%Qt*Xn02Ox$ilvF$J@VQ;rcGj)2+MF`bcWR|ms8S!2iHpP9 zk~WRnn5eEQu?h;9PU$*LnL^_d7xA@nMZyRgd=_B-L6A*q;D77^OlM?F2hubAn-um0 zX+aF`D(69ZDe9q5p{+m4`!j57zCfq`!>f?K+Jbgd{jCa^GRH;0XHF zk?j21>gtzw(6JpaS69~*@OGL@O7<>AuP(i{q{K{;mlq)t92de3BRdZ?Q9}$9<6nzb zfv|_={1(?7M;yMJOhlQ{2WiPLpSC=!-C95GQ&A(z9= z$>OSIg-%6Dg-9eIZ!iZ0{380(EDldAE;5kYUt4;nqM}+MFS1C*rb3m9Ur=BWUQ;N` z&CQ3%Q>9(oV2xi~a5Nf?%V9fICNUQ()zz`s8T!Ys)vUAIJ#KrkPpL{6UvqXC)Kv<; z$ZfZ;BNJwgw%yUqF;ppC?UJG*So0>ptb_YE*gt_C%0$?TISGe;7GRKSd#C9SReL+4?s@i4n^10LuQYmR&(g;e)lH4niHpk{c7&^j z407c-y{mbh)#h;6tgGv`oiHFFUmUaQXK}MOkL(IGN$%m>l{XC^sXDbXQrlEh^X$5@ z$EvGajC$*)*4X~8${(Vp%b%5dJ)0|=u7hp(xuF7CLA-Lcr);iLCWpD50=cvZ8G| zXyoX^g-4^2sw0pgF&qjGnJq4C|1vSlED0O4Ok`SwY>rC?nOeUyX341n zr4tjNMM2arIh9Gqi5vL)qsuku5`G12DZq4-2Vd+_m2J=&fx5H4>G>@{F!OY1Bk4!{~ z1!xd$9M)CIWt!q5zNv^`>^2xW3k4kNp>KS*29BkR<^S3CQnH(xahXxsRNO=gMiDdk^fIJZBDIoi=At!dWYuAML19g) 
zJ>Xt0Nd!i$HvdF*IJ~$0;Jm$le>hxPdT3GG3WHInR%!a<<$XG>{%KSZb#7`p*ty&7 z36zC`hZZ(3HyE^NV|boMt5fOqtLCh^HyDa!zwo`B3cv5>dGoIi1X{IUcgP0hLs6|QLRuYQ$V=?~mEXa0?)!B({_s@J!{Ob<-uLB@dvXOVvQ&h(Xuq-HXW zfN*Mmdiyu_r-NSau8xU;{a!C5op|>TOf>EE`O3a)hq~#yK%g%FHy;Xz8ye5`JNnNz zGz<$r{$qZ%*E1eZ?NlRT4RGAmpj%8n1vpd8=$VDxu zzn5*+>E;K#^9*|HCG5Bk4BTH{UXyd;f$Z$OJhQR|GJ2Dm0ue7Sk6e2Mach>DO;)8! zJ-=$+ZGiy9ccS->(U*^WsOvPDBfP_hMNXM{Hn_0}%SXn70}DE}PmIks2)7y`O$C9V z#Q&376KFTR6@Fhr5B!9F-c7pc=YN78pkF~()4R~ssD9Jw&E$`g7rN(??Md(E)0?2@ z6A;C1h8<`R%sXhbGSd!&*oD1@>Ah!|@ReXU=$_QZfq>fn6Qt2;hZ431h14_OGjLZZ zTz26C(&H^GNx!<#+F|d$!Q=MB*7R^$uSTmy{DK1lNvR(qJHvY?^@$;kNX+NoQOpnc zy!#eX?@vvEpTK00W?67n5Wyp_qJKqihjA_i_y$-X9McWztuTFdtkX=98S9baCHmH7 zvQh^6Ar4wd-;* zTQRCvbL83oQ{geD}z%~Uw z4zuf@xhGCOc1Z|P%JOpfgWjdyo3zq5yDKZ*-gq45j6S1pA%70~BlT=RAS%VA57G!$ ziOii4mq#GqBN{0uhbQgSyJ)L7k?^|l4mo6APW_Xk1giAmY^($u)CLWMl}rXf^1d1E z#!3q8u<1+zcpsXY;ACg(HJ(AM)elLS_lzz$7A>nq6Aq(cz6bIXRBDS_(_rvd@cFrt z^nC8J#+1rZwPPq6+1}WDi_aJExjomfvo{;vH74^ydi(UWN#eih zO&o&LmKwQ{6==+$QmT^O(qV6;w$fPPz)UZ5P)(VBvuPzymM0cb{?s4Oh%j8n6M%5Jz z^xEw$Mpuo=)Q7GEgu4Op5Lp|d&qD(FFPZ%yP=YXWDdbP?{DIB8u47Tj`NzAD*H<;b zb_zm&o%%CXjCgo|Ycvzne{E`PXl-e1YTQ)T*->6Gd(PApX@Iu%oOK)vF#w#vBL)aR zwR~y{{eeDD#Q<&vh@Dyvclrr9@d0;orvA*eARfLm1#%gp>jT75EVGCK687fc{a=vF z$lp*+%>G*D`@g`{gsF;r&r?<9VdxN{yGSYJqkhFiNix>}jCpLXu+kXarB*6hJf0S% zLZwnDTj5HfP*ak!l17D64fh&L%1WBpkP&SABI53;r5xCe0O}^{Aa2g_PA)rq7WPxYJDT;oLwOrtffWxcoA?2OdJiEVy$^#+_TH@1VbM z^yIA&oQLdhkal9o4!`v{x|zFw)pq&>=CUD%40gxX^NnjiF$wiN6)DeeR8t3Hg2j||=&}i!O2bSPx z-h^zH--7jvE$1|K06&tyWuJin9Czn|L+mpFKl9A9^pDARh!5~Hqcfg)XTsxI>WITL ztp^X!y`#R#w9xNg3~S%SDWXnhsbTGxPeJ|)tod@{UWO`M93FK`mIUssm>OX2tiW0I zfB_Hv7PW+UAKn7gGUGkw8859*zZ4nLmo0as-@n9@aSdZhVLmI1xQZD;3e)g;8N3&v zQQAR`XDtKzg#9|kPXyhPN-x8ponx_4<-7b+KM9mDHK7a~)Rys&I%I zLtXBj|6ID_(T2Jq)f#16AaQPG+plgY-1y_(!TS>NA?+H~V13<%<;$2coT0x@g|c?Q zT48#J*ojdo7^tXZS?Ky$hhCPwes}Fi*C=wO=n_P`+ae89n~4HIo)|?|F!$kb^A_t z+*e&UCu0mD;A<(XaZ(EzO!+m8)JS=DsUd*!hSg1B8`X%1Zu7XEuzk9$v1?o}mAV41 
zJ%h!Kb0p#dw^v?cvnCWik60$}y1b5@TT^pi$Labp1Nhe!j~-G114;5!t)>t~&=-0JhkVU)KJcT#Z*$A5_g7^-3vAS)4hEBzj_-bz}~UV5vD zzM5%wB;AflgbtF5+Ym|9w;>C4kzPZuMY}~He>M`ga~5)*WF#tBRABVtph1v)ZGyfE z9hyLgsRDGGp7%5zLodTh5vR#L)F)Tu74^yFbBrW|>}CRSyqv#7PbrIGx-SZ3A^GYl zdON!I6k6t(e2$ujlOB#h8pQD)=7M}VORcwl~|VUC4v-GNGf=WA$F)5>Wli$JBM>UTWTV+YU-|u zRB!R(56+QD?dGbQEoJyaw5hZ6s&MK~a7Ci2KU%YeTKaJ9ESbsdo>lX3ZM#}$b+pq$ zxRTmz*7jPsXEi(IvtX8Ih*zkuI4wB0W;AHFhRs3P?fWWuG#DgDCeOd}$|pj>UtgiG zza6-rc#L?JW9GEtwi^+&<4Sql$EbCaw~{whR3taQiniX4w!H9Hk>DSA-p>#@oC40B z(3;UUslFLx7fkn0B_^Lk%P;(d&;R*(`s~hR0}v2NyhQLgLCz!Wle4Gagr9ur%G;<~ zcytHq-i79z{kfp%SIJ*6I`}!Xk-ALj7){G?0CjotC2}{~h)$oRzreGYoZ87jj5SQu z0oS2>$^HkDC$?p|zI+Gnel)d{`U`t^Hk>#3h#Yt*d4hZX%XhL|z=%Ir=2G_{w&C7cjlI?m+R1c)|bG1f8vm)u0i3Y36MOu3So7S4C?tl?cEr_o>- zO(aGjZ^fuLKpM`7-eA1BkYD68ntZSc3;!`!{6h2=Zdwg_IuMvM7)GEeTro{eMnAu( z5E4J3sUMO>>Fs2ARq_X%Gi}i^*fDj%NvmjEbg>m*S{Gm6rY)CBluC)TT-&CNN~B7q zR1!@Vw_j-tmu%y)8``yHY~wO5T(XVJ?3{wi%YO$c7y|5_0POi7&t#O?PTWZxOKatn zPsk|5Z@~+X^&Xk{)^}qMu}UE2KxW4H{x5f!{ld9gt+p>_s+1Qg@^YmG%4%!H0xdZ6 zTJ3_Eu|m#Qqdxdz`;;Pr-5BEt<^Z)^s)(hHQF~poxBrmpl`l&vY$E$?Kea2?ObRN zQhSiGW2T>BH5g|u?D8C@mrl^@V50|qCk|7&sSZ9rOb?(Fz}vk*hdW`vCIECe45ORj z=FK3X8EK%IOl$rQ&)=HK*cqM+wP+>mW*7o?$?*H$lqQ zKxO5CPOVAKuC1uJuYb{f6&1DkYEeJFn%C9X@bVgbrlPB>q4A|P%U)`1nv0$>1_VL@ z?2y;ndms=c*TAQMAv8y$H#n`RE@vc(%RG0 z+5!W9v$b{JyjB=GOsA@;sVaVUXb7g*+zeAJNg}PJJu=vz+>I7J>kNx^`FZ&WYpuo9 zVdflIKAdNSSP2j&Md2~Raa_&JgMuYyVwXLQCyN!@|GN*6*_ z<<(-Lz^hccg#y@m6S|d3uRti~d@ls|Y&N4|Q83tVGTI^cra!3i35CVQLZMHk^ay~c zU^g8&c`~tfe>|op12Ca z3I|uB+C`5YLJ!jm4xhuAf|w5HPT({pbC!e(hyp<)^J}E^aN-a64SoXv4~z5^`EtkJ zj>!+}{`h9s-Y)tJ{O5lVSx5itQ-CY#P<9=481;@taaG?D0}e z1%uO|A3{Vk=F3c~3rhpo!@xmt6lM+n3=F3nJFxEICv)pB97YUmMnId5_OKdd8P3rj zj;#yoVyfA{zF6P%5V9HRHy1fuRZ6YXW?$dXwaeoP2#Q2yW|OZ|YqFv9u|7ze%oFe{ z{VG%=3n%YEiHp>Qk!h$pL>3}qvuE*9q7>4 zS}gN|m8(k}ez()Lv)k5cb}jTP%&iXpTKaX1Uf&ZCMU94byNvEE*5?a^)E{gON~KOI zmP9pDuT18LMR?yi!PKQ-A|)g*OPDq=QVtm##$u7jYDgAe zDY$4l$imCILdM|?eavOhj-RA5ALQHsnZk+Zw`_Z>wWB|aTV7NB&H-Jc(HNKOBq5Ea 
zQnUJjV9?9uG}lz!GYBcE0Y3*ppf+met8#L4DW{^RreTBA70JmGNF|LvL#0w;KWs~%s~ex1itS}f!{ zV#iQGDW#&Rjv09~sZA6loM+-RXfy=KV_5hYy&yul6tjyo< z))bp zvGcp8rD>hjYM-d9ia)t_#S`&(gGSA_N;Sb^i@iFgpY18XrJ`z)&S7y_ZL8}mHhH}j z6TMAM&kU`2JP~h@%it8IIv}WZ4rk?vrMpg{5C!$-PIPY3 zZm-NP6zYl!Wch`vB2{Clt6im|e_d8rVi-{ntajlt^YR z7TYB1datJ)&L5}^`ep6prU(21m(Sz5v8Qa2S>-7%Foh%G*0y$Wj!ROxWntb{}u z083&Tlz#0_Xc8h)jpL1=#k zZcMlDow^hCPPfncDh*#mh)^?(p_87)RNpb;?6icDNws2V_=*i>&B}r-$P_6DkVr(n z#8l-$e^{m|Ew*|Ynzan6g~S0g2yh)>_hAOR zjVbKD%V3wOxWfxVdno_HVEqh&s5x7ShqovVA0dKh4TjTgxI#|~o`uFj5J^w9UYKs( zN1R2g0e&~Ubv3UE;1E@Y-bbc>f%*t?>Q#W_v-%<&(@fPT3a1KufH5o%;MiWOOb)wA z&NB+mpw1(o&hDCr(7dkMpB|yl{r>mp$gd7=I17g&A%YW$p51T|4bSiS*P&&H{x!d6 zm`q-K_%L|K9d-53ty;0_`TF`!L{cy+rYa7D3y5klbKc6}f`wyBW8wV-m!UKkj%UHb zM~E!cm+l8sAPes!wlS3U_5KH;A54KPT%K+}FawS$lkKN>YA-{XOnZhhS@qmZ5Vg;1PR<@QZijLCC{zAczfiP6e(l zw01327dp3N@@lf1vlr><_wn!ENdgxd;t+f3b1%JwIgP1L#KSZPr>S6MeZ(4uwjPBy zX?R%bflJEJbOS@vEF9A`3-6zLo1tkIj{9NZeMHl@_A?0WG3~PLG3~PL2WG%A?XvLR zsRaz}GVK}KW#Nmae#X%5qd-@QwQ+lc{=!^q68w=&;-&&>LqGl~xUj3dnS& z%#zS5wszZRP?fn(BXtOw`b+6~VEM?7Ya!9i$VXh4>3v=~z(G-ByrLITJG4JbSK@j~ z0EY-6z?T4L{1M=<0lph}1i%Yed{2c?cpA>MXW+yN;#}V?43#7ZS@EJTWzy z7<~b@3e*2hjJ{yum}6LYKWI_RF)SQ&3=8ig_Angt^?nASJ=Pd3Jd$qTJ9P}L0!|u+ z^?8l^6`ZB7!4e5f7RJ1^vMENaS;@K$shtuw07tRfBengVY2|WyNl0#}mTC3qAyimc zV3Dh9tj;ABYtB@5bXAsDym-UzXX2G*7fHR^$=ojDT&<>A{-wZ>QeXSAC)FL2bW6%GMim?y}}%^`0l%@vjvZPg=Y^s%T25VGob{fK``X6H-fYkC%a zx#hvwwu*|?KKirs-tbl31IepTbv)A+Umy||+2xZL6;YLY(L1O5-e*%4;!J zm{~Gb$1hXVwH3jsrxaBzPGe;uaTsj?_(h=hwNSj!f@5h6HEu#JX?QFRA0a5zf#E>y zPzNdvUqT#ZsGX_O#85RvRG5CQW2l;iV=IV-j}Qfn6|{k_#Z{eBaIk_p{{_9Xtud{$ zt^0^wXf^P1D~u9XNditvfL#N8EM|-x9H@EZWAqX(nDm?9JVAvIquS%g>1Pj5{(!== zgn5pKXCY=rh2M^6!PHY?W-UpL_C99TEW8rhv+#bxg>Ff=uSmoDhzHQDG#q1(Z9fR@ zufU~gc<}BCac5#P?1nr;re$K zp2)knz7o%#g^_Nx#*md7{Y|_pvmGWLzcTu)zobXMi149GJbL;XrV3TXcaC1__Xqx+ zHezS>ZPTfZl|Q%`QK(RsQ^cRCq6!OyA!BR=4vlfW-l8=Em-uq(SgJ*OeU$!KvFUY~|9Brc$CXbF=rYB!n>@U_r?n4xfGs{dmQg|l$X5iER!urVCL!VA*y0m91A0#gBt`x}6(QvF;@ 
zh@pQ5o|T<~XQBQ)JbD(Im2TZf>?GvV@N_R(XfW>z^n-LCy$5CKwoj*_i;A*AdmRO< z2iI}vggql-^xx6g|GO$(tMp;9Mn97ni$#*!63c&Hy(^dgFQ$5zM(x-)$X4$vl=zh9 zF53)p)kNglkn}&V>!l7|#n$yoNR5V6U9V}}WnmiyuT~Zs${k8We>M#zmwhpkhVoNT z9{9O?2`#`&K(C(Wz7KGqX@>HT5E6hhHMW>Fgu}=T`~X7<3>?Iig~0GEG@?=9*5Ktr zMXoe-tc9_ypl=WmDJZtR4>I&I0&?*M$QZH^+V`MuVg>m=^hFX^L;JVEbu%;dY=cHf z62i7WqlQzlmMh*A2p6cSHDix&MwKSc+pkVcO%=NojM`hV(k~XHa zmc2o32o(#xT5T&FUVs8yoVOuY>@b~_T8rdzT)Jd4TX0KUEO2Yqa^6l>vBkDT{Nf4| zookF1izT=W6X1ZUxW!;n$6#i`)fkTqG%E*iz(V`8aEyk9Vt!(wBRpjqigC+A`>w&b z{d&KHSy!MxPq!V&fPR>U_U^-+$M!IhhAzs@0r~zkc*RoSG5l~|v%`vI(?xw7^%;z&RU z@Oj!|)o1Rki_=G@7*%vbQ%&N@m7sP4;hAi%Q%R!wy441z;@P$mpFgwU8Txa>m{Ds_ zRXaofTJ>CAT?fazG&qB$E#NiTh4TIY)Iy7Tf_o}ZMlCdSu+(8$ALg-D&|pQF`ecke zH88Z&|2dYYEEMw#3+-EhSC54rn1+(=ydGFR@VYqVz$*|BO2CmamQ$unGlg&v^6Q18 z1munMUnU-VY@*n&u`KYHFN6(iwmws2)M>3zvZR06JzWA!uh$J%Unv2mR7m4C6a~JX z`Uu|bXM@-Q!v|eiG z8_hEbMJ1P1nDN?FR+uV{F1Yu05fcn#w&LEpNJ>JXdIWHvO3@^aX zL+!JFa{mbMH7rJncc(5h@HnnV2AH-m@Fm0${io@Ecw+#E^=G8>z33Rg?}QaNm&F0e zYW*Q2S6{QJ%VPlCEk!8QE!rgleGM(IIvi zWfoen0^1S{l+hq8bmU7c&mIL&11gNcj4%|(4Gu7#2HP6zEw=T-typU@mBN^w9c<4V zaL){s;VKr|PhhTMp_r>!Xx}ExRbTI6kkL#m6x%3l+uoaHKu;S%FFndR18We+^pv@C zgQX24Tf!;heR@ajyOVl0UMW)Mu-hu7awR%dP*`A9#EuXArv=OU?ynV;n(WIHC3Om! 
zAv;+9-NnmNbL~sxg@=ZT>oaDHZ7+7Vi#XZfId7TT3{}`BKn8hn-3(^FyemF(DmmL4 z#?CaWl{4lFKC}5sWR=~_+H%$K=+@?v{t#1`O|6tQI7hcOxkjTIv$Z7D^ck<090O&) zuGJ7zp*ccT9-j|&+V1#E)0>9{BDPXn>z?x90^^%^6pJ%vEP%5h(PL}af^AS=^n#>R z1%*PHnZsPTzXraFuyoQYhwWVGg@umQ zevxjADVFV{53d(+<#Y|O66}#-cEsjUCOOX7^$kL)0VAk}=~`g#z4sl}!2a~7bUm<3 zhcAC?L9ok*FYU$n#Akf2&3YW`%-KNVFJcWNSOf;0Zel#Sc#z!x9U^@ zztE+-l&>wetZJTFr!4Q(fm=8Sf?X=PA^Xw%j`1Q7fSy!NVW!bj9`>>X0Dy%X6mZrGpCIa`& z!g0?m9HOEO)@QZ{TS0{TW}&!m+&T;O9>aZ~fHB9IzJsa0?_&C9;b5KM3L}^^%{W@Z zLSIfp2TWYZ6}^Y^0_EbeCrBuxh;p5K{|n9wD3|^mc8k$naMwS5H-N~lr*A>|+#k@N zqg(!Ug3YwPne#tX8TW7C1*P|# znf&(Y-R9|JGM+DcIF6FJPy7g(sKSvoq{}}JjlDkSy#X%pnCx)+>C3a zg)kbEGmajezTz%m62@aO&y{>}SgA#nkNeU8F^B6ey-L~Qa5SluP*GOZ923$*o)!7eFHa+1@>kCu`X~2O`tPU!|6=TUflBU9Q_L^3 zCYNT^D6ocT z?QQfd3V{@aIK?2yHs189{IAP8@|W|VF1YS%yH+~bwTdX~A%9}#PL-ujE3rfG?y0$? zjrjCy=i_YW&E};I&8O>|TDDg_O(rrC?j`@avr*rr<3T165dX=M#m}p%7T7sGRvd_ zmvTJ_;ty1A5tNiNn|F-B+y6!!p=jzNynXA`C}5gLY$cwDe^d50ygeuFuYjeUt{t2% znhaF3W9rY$cZyI9E@5aWT_<^F9YO($PEaBoCU)0$T=K(V+NOEemN@J%iS za%J!Eyd6+m!0mKy@8qli0ZDYw?NHC}1=uUMLarx`ugcE1Eljlx|x zF@#SRWzbh9Pm+)RhAqzw364;nS?UyPD)dZw=1KmaJ|%B_|8-m;8oN?tb!NThp|-^u ztsLQc&Br-eBt=Hxgy<4Z)pY%6S*mQabGm#p!s(fuJUH16(SOjk>1^Hbw4EwO-@jxs z8W-0>VHByk!90R%taHZdR)d(z;@*~%UlA=k4OoCW(BO|bU_Efg0PH<{_xuqDEr;nX zfa=pqWdQ2fI3?Qu&J_N%C zJt7l}BRXwJjK!%qq}4^lV%c0|RVgeMh06NBP)|c7h9bKmoh}TKONppBtW#U9YL(eS zpvl)c&n7R!IYBJnL3brDZ-4A@&aH4KV7%}QkIl{0Md%4 zrY3LX{LfT``!^0)0GDY@vB~+I_vpP@>zR{00g$!ulNvWK<0v;TxoQ_uMI;iX&uv+n z;OQ%*QuuBrVX2<;UIPcxSVIyt88923AmZ$a(+kup8J<+SqKOI@rZx> z)d$Fr9)0eu&pE%tIya?vNjv9Xlq+k?^w`O(61``#w$R(qPDm3&B9N@qyhax;%ZA^#c6`%BJ_b<8ge5+MD_pPSJYFWt4?ou;@8YFj^Md0Z5-ihNXxsO@76>p4QRL zs7|9qkZgi^^sA4km*@9R;tEvEE-z#wQq#3u^kQx>BQBcyGB1`Zzk3gtYNNg6@5pFX z)$_YIKapsvCdqhp)x@>CzE>VEe~Q$XST^>|+h}sx%x=s0yzY%guZg_l@L~Rg8~!>l zuzs+2^vi1>-17P0{FUPi`aeV3Q*&Qxs_p2iXnVE$)SMS*#k$(dI$i-BgkX+vz6NLK zB;dRaq=J!Gq0P_H%9FI}InL3`BY1u=m;KNVe?JG-c%Y#+f*e7q-~H5sljq3KCZC}W zK|Gdx=*SVMAAwKm-V4t-;2F@7gG^2ceAkp?a?=C}UlN5;{M8@vDy)W9tDqI+AS8lN 
zJ`0YT;hy!BoQ{znO$N~KX(cr^`3Le%Iq5&LFFBEXYTpqUM;$zO13ae&&oGRiomp~Z zx=;s8>1-u{FdLsXvwL@JU2yW{iJ=E#vD(^L?17<)!3Sa$b#)c72a-Q$Dnr7t9HWVR z$7C=L$K%6L6PNSh4}Zv;U03()s@2cd)phY+d4)HpzV4aTt1iK(>YM$TBB(iLl}&|7#*NU#l}r zKs@|t|HPu>kx12qQLCGe-lu*%>CKP0TvvCHW68TVz*m#B=F!ih+Pv08;;9W7*0Bw~ zg9P3=2=81D^jHqNGZ?SzD>4Pw^!ZEhQdvSxow}WIPKP{&UZFp)9D?Gt%{8?zjE_HG zQ`-bM8m^?jLa(&1(@Ui8633Rd_Nz-AE~sg<>ctlidv56HxG;a#o{zid^!Lx{{&>%< z`R6-2Z}1#`QMhq9@!)Xh_{B4f7)nBT#yVQ?@Z%=@O_kJ#Fnj-6mfBLS6kV zl~PcmP&xTZMS=Kck^=@~!1BJh`|gVJdJyrYT3RO(704U{jiDmozaHlJD7}NT0M^99 zd`lIZ5(}%Jp3t;_X5&@tT2T-{$Pm};l&n`&=X6O9a#`QWide%XZKK87X&ZVFXgd;w z8uhxc#B76f`s1T>_Qv1~Z46LKbXkqR$D|L5;ow4lxD1X+!SNQ&BJL|tN&i6CSF_L? za&umO9+akdU|IKVrD_)+SD;&3HV?j5C7L{R#BZ~Y1*?`>tR*5oWIj;k!9Ih*YSZiI zx&w1G8Us*Y1K4VZKs$rXNTdqFz(f)CbNX>PlF?_MM{it0uY>vZ*P$Vbe|eI2ptm^C zm?Snq{9-Ii1vw$J;aNuKN`WWfX9g7jt&&!jkZLc}U*d&T^)my~rMf3=%@hNJ0W3n;i-%BrQpqQJ5{8 zLQ8)w<#yASmX6zX>9`Q<@c$ki*^xuIZ5~&@&DNxJ_uM}?j=moYxLb zxz6cwaQ_}0vRLD?O39Q|>cp^o{Wm%~hK4#izOi2Z9a7z;45?H>g)xFs5J7o`qWnfj z(B*Kf8GBjYyC2PINL5cXc72n)Z(k(Z&=8I6!SM~0wrl(rh5Z0=X5%GomJat{rF=? 
zvqtR^n++&pT-Af_W6off^fsr@j;=b_(3r89)U9q$yT;`vq5kXK9Sn&)D^o9W%$ zAUHXMavD5{EeTMZQqiq|sOjz{2Opf9h)muBhATSkp#OK68S8W%!}q-dpO1w)y-?v9(Mk6c{Dy zkXq(ZO;fbFoHLw_JxZmfs-_%3hut;(H>OgXGvqA8%^yPa%L`6r`g~MwQX>~W?C6Is z%AzLgjA+xS(QLjLucXF?`?+UxzZfEH3WxJU*xqz|O$b6vo zt?#sVT9qNSTi@@8wyDw*#(H{U$t`ZTrL3wfd;z7f%uX{1{;#O&?6$edDqAI0C6Ry^1CW$HhOViO zm7VI8ln7h`OG2Tr2ov;X>ai`?(mfdz?^?=Bk8>y>V8*SSr(L6NSGXt&6C{-4tE7o2C#KQ*D-I(EL%-qGW*S(m06)ZQSltuJn< z_co#QPEZBRNZts}jOIfnUgbt^5 z!Qr4=64vDXF(@a^my+f-PupOz8f2y$%f)3vVZ=urg-m!D(xe?HPMb#Xz?6V(;njP)QSsnr$Av}8zP)vxP6NG8(YrUw2*>E&-l9f7q~vSR6l?OMWXAY~+(yQ&8QV_KG}E!R z(~#8blLoi8&u2y(7cJr3bKiZr$H8$tw$d`bRI^y zAma(;%&L&GSbGJLOop?K$>iBJJ0GvDjiOJJTlsRdjRON>O~KW14}_T zRiG!9!tBNBAUifnklp-HG#aX_skynoc?6{kO*4@NLdmLSjs7KoQ$%;wnwoowr=pUt z5C&QSCq!wX1PbX8`HpB=Z|`$U*1g)*In8ac&I;vvrBf5}8=9l5V$p@}o16=4yB?dr z^!c9NWw%k!E3H*!<*o6=zV@cwiCQQxusvuO_Xf-bk!-Bs7nwUy;3k%mPd%57%`h0v zS-^?hy(oMASL3lz(Dbq+OhG;_?SiH_P1I7GxMvypEv>oj{odZ0LOMp?6s{N;cBfxOdY(6!+QY;mN<0~h;ho9e9tm|dnYTPKBcrzB4 zlJ(zY;H)bRR_g|)Ux&nSd;R4Eo%0JBWp`nF-ud2~*Z&Z2x0e5)sP;6uUz`@4W>mobp zOSi<9`r~s`EaxQ$30nYYPANu-kk&8+t3N4y}9X$H9%Y#Nx?R>*e<4% z^{1CEydO=KoSnI^!u&~CoOLLE?pOyUliGN+Ou!)vX6_4xB1}2XkZAE^Ev>V~r@PkK z9ZmwZY;7AEr0B<&J(5hN(OB=y>Ib-D^0B0*10odt52hsOGvZ7jumO9^_%0seJ4q%Hvj6R_LU;ExDJdR_Z39vRo|Y@{UUQfZv#j z)n^c=8LP)zf+NlFCZyI)$bkQf^f;_=D5g93H9r3)ri8x9++&wctT4Md-13;uI5k=~ z+rki_MQ)yz=)cA+R`L1#3N%a*uj6nyWxOhhr3a9IlMol< zxDA*7NakEHq)bhtCa0Y!RO+F6YR>K9L@h|-(N!29ymTi%{VjEl;#y3OYkN`)EEfBr zP#|*O@|9v7HI{_N!C*FIpAKCt?=3%t9u5v zK+i>=+gIIg*Oq~v)ponDB3sef*mQow=4aEH_V!Hr+07fyH#K&WXZkk6K5R)QPmRtz z(KB)?S>NJuxvuS5^R-l}X}G`tM|y^ZbWRPgzdN`a+XVXB>F(YS;H36IMquafF%yS-kY z&+FYiZO$BT13(M_P0-+*gGT1l+IGYnmb)FvYPIHgz-L*Qif(PIy}pI2{`9x4spNwr zBM+vMZ6|8n?&a~s3b%);aEDt974GMeLyi4Kzd`&hia-HgBLA06@?2Q=T=?{Na{wzA zz}Mb6;R~N$pjL(>ZSaRn<%+gQn5hd(*VWxUd-jRiL?g8#x0l*7_L*Miz7vZM@0X@&*K-!JA(h2hB z$`j#mfMAtgVd(Zj`efdF31`IAg7=cCaeI)g%92Id-0vA5CVi5i9n+^D@eppM^8QYy zhdhfI4v+F=z;LuWFZSPK>TC z;GWtiNNew>pAc*G((pIeGn3u8QW?wOu1)n(QPX0A8T6j?^! 
z&3&7E{nD=Bwe5gE4J>9>2kXv#49|wzp9VGu04MSr4c9-4i5W7WE`9sg^nd*J-@exP zOxt7c|AzkfH}5n)L+#IPrhZ7hpW8?s$bUO??)-cEnS0KHKDQ3G)D?b@f~Lt%qgV~P zI&kUufq-CV8E4_h&zjxY`ON6XSJ3n}6fjMVAdRvnPTom-rluKrDmn9s2{9 z!{I3;fIfFc>P8&Cv|J?Rfz^|Tap$rB49{})T<5K=J8sb1*7hd$ber33)H>ED@5N3$ zh36o?yk$Z^LU4fTJ6_a4Z<%IA>(njipQogh@6#W2cF=FXOa8+numSHT;GC@yR;oNU z+h~;gQZD(#i&XZ~H>c+VOj17Pa+*GskFJcLFDw9azD{#yoR%KW=Kh&KU#G|=UNw8Z zNXc2u-icA=a~qIDRM=QXF&kIoFmY$L^Ny)?)3iEmHW7`USTMS`1@q0dsq}iQW&L#W z23@=LOgtXes&WrAYkEEIiU>~LL&*$aoMpwcobO$D@4|ceSVjJffBI4W^BDITe4eRz zVm^=d>c!Vw`>=HO_+|7d@=t_0z#ecZ#bnRqd!pz0Lq6AaGOL`7tZc0zVbZnstI7I? zhWg}J*WPhQ*K&&)eVr`JyY7JF>eJsg*Vo-YIv>2Dff~+9K~;+qwcrgP5>Ck-cvcTa zDF$~aL>ne?BZ4ts!IeoD8W@ZostMQN>(D558j+T{i9WSTp+Gmx#9YoSYWUN^{6k0Y z#Y4xw!YmE-O`qMqFaZcFR}gc21kWAHKi7!$!9ylJl{c9KqGtkm`HLAK`@qW_@OTmX zL&tc?_Nn8ty-1Ir+0?wnRk5l$bD$75%PJrGhCXl8X8wG~M>#_|V3x%u38y%Mjkz$QpB+ATWgq}? zIpapJ&$}U;-2fqebBlYyzS3$e++&4XL_twErKI^~E zSnK4uHPk3M*MNwcJ@J_r7|@WLvL*W8WExGTc|4tRtM z0qx>VF1S^wn`@W){rx6UCKTG%zG$^SQg^~(w#GuxLO(?*qM% z#OU2&U(5O@7@q}aOwG8l=kD?QVTMZxGvrP@W`U|FLiVb4t8|B zv|hR5iA=iZL?RH})~f8?%X;_s_9|Pq1p`n=IVBHajYF^zw1{8UF-Q?b%)nV3__3O0 zlaGyLGNX8i{Au(gy5+zm?5Qa;MyqZ1ruyVVi|OHuBw9NIt;IBI^t`3t{&T|uIJp$n zrd2&ytBrlWL9;0+pkT3@ric61*=)AlDag!}6B?jWMSetLu-j5@V_2$#JC~F^gAp^C z7MJ7Yp0NWLVKbmv^(pLMoc<>FF8p4Smm91G!f?leT0#=z%E&!SrKhQnzarhA>k#t+ zft4h+<)oi(SD7m-MaO9`X+)EN_c&jsS8jT)wRM2v{~=fATi22~!HfcV?%c29DFw}g zOlTftT=V$*`yZg^eAy3PNM@7I{{UF?@BjS$WS0DZ_D~UuPdAg7nD5*tzzxNFQuMA4PH$1Od{_U9HZ{wfgOtY%$L5Y8u;FOy20UOalX zIo(IHL6>uRJ%Y_Xo&ozy8%&uU7k{HisKN>gM!1w>vUh%q&0N)HAAHGt(^lz`m( z2fB*9`wM#Y13=?=hmKHTs+^9Kx6tzC9b5@w2aYS-iML2+;qxcVni#ay2-h+883lz! 
zT&d`(b93S=4|+6~G9C{JX&1Tmbhu`g6Zl?8^muscK=_8C(UI)DBT$O7@C^;d;)5EkhFYrCYKP+SA^eB@{N>Mdbc~AWYseANXlKVWD-_Ew zbau=alZ$k_WVEC6!cIkIdnD2$9s9ksI}+JGOVPR`66uz5RLqY%m^q%IU*z6_IbNZ) zo#6ydOLt$Sg-qvq`bF|@oG*{f<9wO9;s92Kp)@9+aC}l%)7-H5h6bZ&W+1)Tq%~w4 z>+2p|JpB+dvU9)VS8+>AEjHuaM)Waxb=k?9NDcZAYt) zufHuQ=GU-up<2i$Te)Nj***Ul;r|J?tH{x*`OxC>68jWIOs;|qW+ z-BUjm#HZG3r`6L>u@8&}{MUDOVVdFNW=zp)%c#``X6GR7>k%c1;k4xKVzgXjVR#DE zQJk?1ACWpgQN>3+W<2fD9Yp2yNu-QE4;dgm({;n6B@u#WzrCl=~ zXlU#{A8^>`bDoa-{hOLjq<5gNyTfdnSu^$aXe>3{)BVaS&#G6tyJseAYQEGJ=+o;E z(xpn)V}!dgLL;;Dj4lgRtys}49uvQ)u;$7<15Y-)yEN?g?-`!A7eP0l-@k9(^c(#C z8vX@wb#-er)`pT*3$IuIBW)r6Y>| zJ8Bb|OrrLVentPG1oJY1Ix zKfi&5^1*D*jDGMl>VO}AI@YV!sJQj$-n_MQ<^2x232l1jP=B(fHEJV$(x6r~BOCvB zs7bYhk1=ZlXWCV(8YjX*$V4spg*fa+vPztUT1tPCeg65Zve9N8^fsW*(STw|_$4zk zks)sCMS_8kphx}w>B$v7dy|HnJ%0ZD@k<>XsXZl^DF~WIEl7`zMfx?>a(?NN3ckEVZ%So|q;*MYe(R3!!AkJ3VdhUu&~DcnnS^&tbQ% z0eq>|>L%wh^~qE7w_j*z#Gi%>+h^R@(9kvB?Wr~x*xG!eX=^Z0=P;Xx!&8tr(}2+2 zE7y2dz1-acdJFIAEOdFIn7TpqDXeFs{AnhYwSZ?*EGrozEYcLM)%<8A}jKl?hn-oz!q;TXQz&oBo6&BK{LN~`7RU?vkx%d4e~Yy~7k|BgVif!sZ9 zF#Yv4s;HKct>91FJ36<5Q_xRaTG1wjtT(FF%?z#Gh>_ljk>VNOzOdFC^4S`km~dLs?bEY3_9=8GKN-nl`m60FY?z3&+o%J$@;} zkvbdYQW-IsO^b`FX%R(7A*r2t_QUi&d07VO$J+jR)xk_gvSeA7EQI!umw)$=#f;2h zuDAv~gh7L2a$BsdUw14cBy(!oHNN}-gBRsB)!hD!&d11hmZE>rb{nV+WY zgcDea)_x|PU#io=9lR5DB_*Tq`)uPT+0jliM4?$t9A+dM0had*N4qVHdI^sfE=3p- zFDhh`fL84iN+ekV$m2CFsN>i8L7}1Map~QSwOVWt zlaSBw8o^#T0`J2#>?$-xXKYBXM1Q908j~hvnSW0rp0wK>JBDgDH7{ajem5w3FF0ve{``&G=Y^wQ%tbK| zARDp{jyyR}POUvQ`HtuN4Z z_tUB8ue~Kp=k}^q`-z)=pJ5%$|HOR)D3DX=NAo%e?!=<(sS0DlNzAV)gZK<#;LMNh z)aY~cWB0%N-E8)e&Cf4yJM#23W&8!?`?L?9-d8zE3=yx{?^EOhn~gSMluYLJ^BAQJ zXNaBeRquUv`t$VH?tkx{Z1(t~=jRRFn_XH~HjmipA44_2joO4k<2Czzilnt#`bFG( zI_`ZF*3wzx%g2RS^@AC3fl}})uL(F&FhB}=9buxlGqO1PErF@ozCepYp~!~NF>D5b zKr)jd&Wu|E!7%mFTNL#HIomKrtybiAQa37q|Iq+7@a)2UexI9A$8sy(KL7rOoYl0O zgZ6kB&kBrZ7Ut?^eCQkV?V51@B8=IUD2zyncNry<9Z_CEkz+(wCuc>ry1Kf-)4;n} z7V)4^sv3RIT&@ir9HJW=Bw9|_7UV%vk3YucanJoYpFLVmw~=Sd5e=Tz^5Y*bxGfk6 
z@yG|XnG*~IZeK*bPJe^XCr!-$4q{wP7Q3JE4v$yDB#PDvo6KrVRhmLF*YZM z>^WIkiP%E)q(_I==t6Q53f=-g`9n_m2YT1pvv>3Ou&OEEE{fVk{)V3PGAYU3K;0k^ ziECZ3+y92fn+^+gBDb7E*#mWhP%oCc6&xGA3L`wijBp4eWO9LmOoisgiKjE;iZeWH z(i)DB)>#-WkJ-_ov-fbzC@zPbtf~@v4UV$A_r5OZl_;HJfi`le2Z@9 z+W4DF=9Vl0Esw{53f3}MO@YE#K7>2~h8GvcpVy`^lfv%jIa=X0EDjhYMNvEOG>+3+ zDB;r`I54oo>-D2P(5bSICIUU%yuJ_#I_&d;-O7YoZ5D~8Nq21cM9N&Nu-HO=^Xz!t zT-3~&jfUy5*!8XSw@FPW+{&m#l%##80TEji2(&b_*}SyP+N{&~q#8Z?S#)T)4H%fx@Ej){fq7eoKu+(d=@K#2T;hdD;|8)d`!)G+bj%Y0Nd^$|}x7 zY?UEZ@-wVcd8wgfA=4XWVx7R5|9@Pn;=5g5E#H9y?krx3t-W*h>_fG+LmI7`5IAtA)YcvpNhKbwfCOr1TCKGznzoQ@Q4TuDy~UA0 z#`sxcu_0w|?ET%iFT?$qTzuSr!T3!v(}_$XJ{TOH_lEP13jsazA)nHsdt zjQYGx9SKA6JN-1sD6>VMXL2KD6fzW3*c?E*1 zDuKXjGWmsqs;`S{tLp}JT1)v~A6m%gc0F{ziib*dx4tS+MLr_G*!Wyarn57HF6y*# z7AdxME290Ig)QzO2f=Ub z?c0PGoOY%G^_Z6FKSaHDRSvUXF#wkuv!jOwic*a+-ac5yd-y|;n>Uvn#{S7C*m*IS z37(cO=VJ~azQ!&6{Bea0NW@rX1Ou=5a3_jnMj?8Q8_6c5LqT@|3Ppa;CnQNE!c*qo ziRE(yITm9?E@ZlE5TcZ$A`Q8wRZ<>@bCJU>m5an~l|CSmR`W{v8fkUhIB;V?ZmtrG z10L8}!6_ywdhi&erm5lf&8RYmXt64SH?DU1`z)45gR#j{H`}1m?Si~17kCZTE_t$jPfk9*xK(A!9Tb`IS1KNF-6KG=mZ9 zJzb5$6oDU#9LIFxM(4bE&O3Y)TvU3U#KY7d=?!pJ-84+S7nxhj>jz*q28 zhvr1<=NL5FH^=vc+jqQLreD%Q-y9u44HmpeE{)h$&0teV%_3t!BoN4}HA8W-tz)si zy81XWKkl4Q-#q*!IH?tKfGsK%&rwQy_-cR3R^`6z&pMl~q?L zod%<}KSKY$cpF&%`AvK-cwRfR2grg&Uvx;R&+P*H&gBF1U--g^mQ+1L^yK6PB7sYc zA?Zp95}jTo94@a~h;}hLuL%Bvf@3T&Xp~z7OoV%CP2Mra=dQZ``c?E-FC7eatKEM~I0$kX0%K*6Lt$6*3TCL zMeom%_<&L*O2)r^D zp_EX$rG2ZcRufa9*c+W~HaX5U)z;oMr+r&Ze5%f5j)cR9Mwx-M)h70Lc-r>YCfeYC zpBbKddo0#qHJV53nvV2)7CzD0xj;-KRSI!Ih^ISwjqruQ6T(Z_L8f*-FaI6E&(!R9 ziK&zR@kb9oNB^{)Sl*$3+)B*9c;snfZ6z;|)n~SyB`4^o&R&0(K74ljS#*R=pF#Ey zDfuqV>~72FQxK)t`=jx#*2Bq z94Ci$0*uQK;_&zymiuij21}z+>lX=xH=rTbyriqsYU^~mX7i5il#2y=flwwA0j^GJ zi}Y)a#ykTRVv2;k%u`U0mk#H1Ig|+?#F3Xz^{2>_yq~<5-gFy2L8W*Kj;HVm0M^L) z)U6LbS-)vmqTdsn;mYR@1i)_W3h|2i~{?hl3fwK_dDu)!OP`_~(E>^oY+v+I3$W8x!P z!>d=mk$Y+KJ34*tg$Eq#x;LnIAoEYL!l>QnzvcGU>Ou4_pmJch0>#=|}9}lk_6q-%ugKMGlBC z?*r5$+slcXBP(2s__A`)Vm4;Smz+nGXi`UR 
z(lGfL5>!J#1ui%-xMBLp9&%0YTg15VTqe`km&u%4NV~Gc*t{C$PJWkbRWtb>`S*6Z z`dPx%!>(BIAnIW$IKV!jY6%MCS7tKHY&IvsQ@@6MjV5p@4x7QZ1kXoJIrPawG@769 zQB$PCT~T{jq_c73^s)8inyn~pdSH2Kxkn#Sls}rS4r+WW8kgP=IIOJRVBa*&*J)r1 zf@_S1E^q%jyUj|GYe(q))GGPbZ*_IS=_&DPul>|Pl_~8al^RC!n|tM*H%FWLWD;4e z1p)n9=)MV_l#>gr!iQ&ulMxR$+q+ zN>#xzDnYXIfr~#vz|bGj84UAVL(7f?gT8?_^hWwoK0S?m=ytlU?X8{fbS0&C%9dp^ z2kX~=y&;t*TFmP)P}nP=uxiYU&1)JDTSQ_JxDvQ0{*8V=esoDTyX0t`w0}gyv7Ljf zlKzak2_$pm?|;v&0%R8MgoGFFPK>&6CnhM)dThbK^LLu{b2{fcGW+7i*+(Ko@p~Gj zX>|O(_t<2qZ+;WWQwS6iMj~=gV?;uX2m^qFA)jD^C8v&(;4kUJr0sk3FBc*?zx@Jr zIJcVmGxd$!HtJ@kB<3)#$4$6j$2caghdKgBCUgCBr1JOl5z_U+C%bTY8=J9JOHoY5 z7AkL%vW4q6;QG7pd;y-Hp8#_yB3BY*%x1~P3#66ia*Wg=%>I_#R_tbOEv{OFtM0*5 zKYx`ISMA7>W|~he8PBW2#?W3|wVM8U{#o$Lrwh4Npsf6P!ii>!7#Y=&r7wuZ!m)>8 zNBtsq+XceOavbeils6;M9-Y@5hU%D!ijO!OrYXcCl}g=R6W-CrNmFZC%o*xFqEcw4 zC%jAQ>Q;E;11hQXuu^5;IxW7tYueGq^l}@rd@-&~^sl))FdJk~GhT1zZxOjqs^7I@ zbvI={%9h24jYbPW@?>9KX!>@K-%T;dE%VB9Utw~@$gcn(vAHV_Dojc5l#Zm@jyJ46 z-`JQTPeH2$Dk<{NU(NTeE@_KFCChV;#8rGkfS{Z(4ht#5SeHn_)iq&VV)+!JiIJkqpek-cNbOwU4ZS3sa=yG|r8da+oof`rJW=B#cmhj6e%}SBAn$*m& z1zM{`B9+pxFdf^~nOYrcx|2R4f)Qyl2GQUeFeg^Iu|_Ia815BRA=ci0Y9?8}{K=L~ zZ+}b6lS}BY4IODtF7tA^2Az39DtcX-s1r+k_4D+S>iZlP&zA;kceT>L>%TKS?Lg4s z)oYDr>RvLl<+^}B9`XCPwb4i0Zb;bDT4S#-hQxiUWG*bd{oDbp+G35RvKre>br z9)?ltuqhkt3cspz1JKF+Ls{)HImsj0eS)omuk~ep)s9zn5&je(hQDbiLhBh@V??2e!>NJ*?wj}HC zo0G~Q8XCSY=rIoZBb%D)r`?rqT5flFttKcoZL3^eMsrMDSt%CDTAYR!>#F!7uc=O@ zYje97fXeGo&G0l%x~Opa3RM`e)a0a{%04VTl%)^UA`M3GCk zA1CB$*XpQePD^jd(tAG4ZD;c5CUOF+m}{wtS`N;5bpkltB%eu9Q3uX}Bto4&N-46Y zwAnc=P*mL!8nRh4<}8U(&*fUV+jDJ|Qhr(aloai0^a^*&zZA$CQONH@V?!RX6P<{O%OlEQfje8fjWf~gJ4K~cu>vgoc z+-EXPtDau7BhXZjlKC#`x)qP7o4cCplV4sSzosv<#v5*~BAZ|9>>A>{g`Vw;U;T5Czd+EHoy>SYdCdH_3(f=_LoV*$?LJM=>OUYd&l>4n?Sq z=aR{WR9)R8D>*T^*%4=lti7u?7Q1Uv{W5R3Tcc3j+11|m)}Ae|c6H4Y;ptD(G&cp> zM#aXJPrr)RM4clCKA>;ilFh!}vBl@Df&DSu6kf0&nOSv_$nDf(=zWD8ZWb70j?G>x zzEiresqxm@`NtCRR)zySjT}HVW~|3EnaDJgvsT!p%Nc(-VrH@y8o*y$(xn=q2#sO& 
zQ9aYC(UYY``5K~|a+cxMh~)A(<82yC%X6!kCp>{CC@`!4{t2s?Cv;@^c!Ca3h#8PP z==R|W`Mip6=YBYQ<>e>%c2DIVhWY_YW4g@`BL)*K>BKBp16x_7w1cETOqXSYf{_~d zx-4QiY_uz%-IG6pd_cD~c5d+cl9Hgh!ET=wi_fw7o2wPoW6w#XnuRTi{Zrd_C)!tA zRT_iAys)|cU{h+u=BgKzmr?tPR4UK-EmL@}D8yoG)vZFKph~P!>1Ic2Z|azLx@%9} ztlPZsya4&{(46MO13#MmU|Y|%ZkzMd7xY^5{CeAXt) zr=pX`9ZuVto+RoU-Cl=%QQg`HqS2bVMB=`si%-SdYRSJ}dxF`|z4T^oC-(DyKN=r> z+kqK3x@~r5G@TG}4)?_=nam@@E1zUWQ{VD(Gin7e8qO(<#^tfC>P#+Tt1TDRt{ETA zl7)BIrJKl~uRY0N%w|Ac+j-}?4zj%m8HLo!&jdhi;ZtirLxdN<--O>o2gL86n!^6R z>9fW8--G-?I}5<$Ls(lea)jaE$rDe3ADbDnXZ>odo?+%}p}z~jyF4JuQ=7?ev-EfS zH?ZZt8~RC*vzn=NTQL92$z%qH;v-*o#JxU=QMWmIL_?ncJ}|6s)5tm-Y$0i6S8D26s5SVdRc5Q}P$S@$gV0)e^OG(8%f<8lS1A_*ZPVVSJ*@k$vVHUjd@%2g7f z6|s^EfebEqlC4w=)hSEuJgbp$rIGgums2hmNPRjaF>1|aKuh5BwGvs_h-}GnG1a`M zW}c_xS}Q8Ybt1c5?Lh+~;`aI$G$;1-H1BF~^xBDYz?ItGP`A_<>eI-Tq_RTbR-=O_ z_ru&DtK?-ONx)-YTp!>5o7~$@^D&Yf6RRXEj?Pycw54EZO=sJCzK@7@#EdW4loAn<6O z5Ua5G; z2=&vtc;ddrzb^fGW<;-6$zOb>R<`|P{ZbRvv&UlQdRMMEaB{|Co$Bo&9w49nZ zYTG+GQ^{&2;vc6LVDbP3P%CFtR909N8$Hdy_j#D0Kx0Out5q8eXBwNH-bDX9sqWv1 zq;`|pxqHr+sjYU4bz$mDvL=f)Ehm;vo2}E_WSbkQSr$Cc7w~UwSmpC&s--?n;JWUm zcSoVOV*3`PQgclVdU46S^qu+2MkYga(iSPNoQcI^#nsGLo+A_!tC?S%cZJ!g{~8_C zsLF=R7z7}Hj!RznzAeEYDT|PIP>mXOn*p-K0S)EN3C8zx9R&R=T#|zgf=P& zsx=`Y<(%fWwVB2zB$r9JRs6t^*<2&SW*7%TeOuga?=hVQ#RHB8xm?NYPBqTpLpX!b zyVIOJ+2PDxNpj$51$3|RW5{qS&gH5%oTn>`^*PicLLuuUpx34yFy(aaN@U&v1F=+A z`2^ylTn<-IE(u$zWbke9kVKbz9YEV$iKx<0EdeUMrGCV&4M_DCJ~AD7s!RWj4Vz5} z-wH@)yMJ?sYd91d4aBdBk;QKK|vy5Q0{rNXR(nDtGN))0>QdIXH0nL<%L0vK=%iXUrQU$G)AV`8si{vBuVG3wLY zn;b5utQ5ISN{*vu$Y>5pDn(q1^Qu%R^6PbNt`KnPQZk{0Fw#n>Ta%1OAa%&6XwUnk}shUYr-0 z7Yr>72WQxWeE=q6R>7h|)Qs8RwV3G<>@PfL&}13m&1kd#%Z;we((geF^%Ai_>eT5v zJuMr(Do0g$1=vV=xkmW7&O~t$*((=^Efr#Zl?eL@SK=z_2$cJq=yUN=gBOlp8Me-~ zv>~vxE;K(FyC%wPoukWy#QAuNl<}(^>S`aeJDV|P#@onLP)($sO`=JL2eQ1Qc=;L4 zBtJnaNuh;jgGbwFYg+AASgOh^31z5$yjlSi6mBJ7=+mR?XQh}|%FEIpV`lGsH8^Mr zr6rJ}>1ON9m~U|{mUZN%QG1)woKy#+q@uh6?bdx7pf{tk7P-OiFspkII_S0VZKYUS 
zjydN=44Ng3B1aZXrppOd#qKQX$o#LNttdq~s-Hj}9dX#~YdZ&ac)WhpPqp~0)50@u zPbXU*Ti3ch+OyWG(V6tdk!0OIIUwsFSvTvMS+k#-ow>&0@{;c6<};(DtbR>6wb-Q5 zTg_kc`OHS|-l@%p2kB3z-_z2u+3A9PKuR9K*au;W#^Etw^|^vd#f}mud^|s+xFP}! z4U5kTW$MW}dpsVO-R0UgW9YWH4}{^ERkQttbh@>pwdJ)NhL0u^&6K$EabYO|(ErRE z*|25%K7=g|x;rEqK&{W}?gpM8Q=~S_?eW|&9A6WUuZ{UyQA&t0_JR_Qf_lTCgsT%R z!rI8VrJA)i{lYc9$mDBV})b60#wqz+f{}5oh zFRjF^0jSKUeu-`ZRapBNM6g21&+FIZJ5L3JOFVGBD0zDhADeNcqmvQx%VGJTPv9T9 z^@NXm&HoIBI3h#y3H%x*6cJIiK%;#bJ?4eXURnw2HWA&3nMp#z+5IS;;E=b!d_4D6 z!c}nAQqyv`jbyVs$y^_O9W$05YCq>q-o?DV#PHa>$SomqSwbam?>l}A8#bTT_^iC!R4n?Co`%`P9(uU1Vn_g;f8-4V)&*%oU1zHPi4e$v{}DHh!& zk)S5=!FfDHT)Qwjrg|un_x4|dFTz+-rcX!~7BY4F)7;iT2sd=FwEcd^55tEan9bNnnIqGx?Y7n`X=W2E4LZ_X|ZL zzmd6I$hW0fN_xD>+XKFC0HEBz~wHftR293Je$hnvqLmClP^3%`$!1)-R zn|w_6P2S_rSc3)Cn5CcbED1S$yH+{OPIjd@lfodZQq7sxI$Nhx&OaQfiNZ0tIMuYw zp>YZY!r=1y+T~&7UEH_3T8?ryxdmFgMxz;!-wTw&)vd$PLsNzx?Q?f-^0@s@o4VNn zo=%$u!t;VE;gZwIREGKqxutQgPF9VubiLhMyFM15kG%Mht;>H%zk}+a^_<&39poGyTg|=nZC8f|St43ZhwIy}PBfx2YPFqzd`{sK zp?Xg!aw(ouI?*Uef^rA?kwlOqn*15jj2 zYF2in|F#6Dq(9U0)UwR(dd64dp3>3&;+jPl>0kBT8qa-LciYJFrW8p=NM~^4YlwWi z?e=Ajjkou1e7m=ARs-kt#t8jOBCpLLi)HY$e{i$B$H4RA(9xHctjzN!o;)Gp6%B@6 zU)Gf{X|400VFPZ(>1mgFhC=a0?-Lv;a=)1V#y2^~81@WbAMn5A)m2s*x_H4kfOP%Z z#6p|Zt=9-`)iRSnuGj`;+pk4LNFYM>Yfz^RNC2pePE$@51L}bmlgQIUg{N4Is4jD? 
zsNYhrt=JgJ{p#o8ZWJn_Sg|{7j4OodigK-3nX9vML6EElQ}%izfW!EXG%$C8jq(Bn%QHd&$)q3~e9VOtQX zU+l1Zm$xelOOI;1xS;ran$e9*|*{x8_alPr6@WjQv%sg>IV zkX_+sOdE7=V64y5W$WjzldgTgL?23X^+pg&D)Y661x|6+?1>nasO zp-q|Y1bGKcu@>HK9KX#zF9Otdvk5xWoH|#Fw3^h@Q@FMNg55mE?q-LsOi;?>x7O)` zK#=nJL%kZU)@e4+3dM&F2BWm>3wQ^T9fC$1VNc zH`df-DpRMsy01a7qO__C6@#XMQ13b*jsbpb9S5BVGEc5huKz}R$B^G0J~F5AKzH&< z5?r%phfjT6YwBofI=`Z=_k`bM?$TxIRs_?_0S}EC_{Lxi01p%>k(BBo`qk{;4^W2Z=?!W!M4rDZ zhv?{~Qz*sdz-RFR_b$#)p~EdLxeeD9Jb>fIG_#PCV*umn8CsiaUd9- zVYR?CLDK(>XsFlfj)=<3ckp<89gGlzA*NK?P}j-fKmz6aBmhWFq9`rrj~!-eFl5r8 z(GXXn>w$pzi~xNol`(@MAeCv62|{&yLL+XpG|95SaF08)6U=yl}q0dk05)hQuW^ruoW z{WnqyM;e1B`#$x%-6_CT?epYUb3( zBKe;Qdwp3R~UqW9Sx7%iQQNo&fe<&3(YLtetLuXbsa(16-SUt9@an%{l z?$r%v=*QdJ^;+#MCr|$5NP8kc2g*o536snQQTsDDIrHp8z&7nm560^zRwAq(6 zyPI|Tgi)*)iJhCyoEiHnms_!y%VoSG5_AmybLl6L!amRr_#avZ%bZ0hnFUTy9ZR#q9r`jAptW3<%E73w}wRi$2R@G0aqD1uVR)o7+zN`HDD z<{k_wq}C_|0tIaCv{@!WX_Q1M5GnxAlt&gA<9^7!So$4|$TBgaM4@7d<+j#I?H@Tm zrT_RG{iidpy>@T|QHNn8hI}YHD%C}+Wh$9ez(3C)W0_~{QwY9EwIeolWUVE)t?Gp zWU})OI&B|(R-bSWQhzB;=CKk%T-ZS;VxymAwbpAwb9tiO*EOt6PW7C{?BOHk9^@W? zoMmKaUJgcNi9ABb($&w>o`-4cb8Av(48})@lKvg{z@?2ud7M1fH$YFr4Bo}n?YLS4 zlnSum$%vriM*#B43LuDlWTK#P0Oh6-W@C@TyX5uZM;l@dHiL0m*wvxcn1<4+`+Dd_ z&k}iX2C8MeR_jb3{kLCvXWQ)ouh~4sSzS>; zvnynTDkIYgQaoFt!Dd;tgvpUz^VwgxaqhiLPC}Ypcg6uQA$kiM(j;&jHc-KmJ~y7N za2jkYJEm>6*-V639ggd!H!rZ7Jr&ui{#3)MkuBNgbSoj5rpBi?k9@hQX$skzm~ODy zNn360!LFq}x7Q|G0E#@je&k3b5+CmAd42towXgN`%#763AlZ+~W0p?Enu-{Y<3=_o zEKw-BH7nU=8&?6W&ydmW@Amc1BxHJb&pTV`qZhwE=eAHVOaMVvbNk$fNq=X@TRWS! 
zztz<_NS!40(b%Dx>3se=VI<}o*gq?t1*dBoxzbRe^52|S(Iw)<*jdh;+(Jrtg7lxf zwDCCE!d&qYu4up&<(IDj3g?Hpm#Jf8XI>&7pCrriVe&Ea*&SHxG}am(%_U4SH&`CN z3Q7yB>ioCsou;EiPSlNNOS^e^r`zKq#O$zd9d2A`Gq@|C5e_vp+&gRAv*lN70`xwSoJ&mXY?Mbt7|m_n;|r3e;^PX?&ggQ@b-*bH zj3B6ZpRg+d?-sO8Hk`)!89uAm-kdN0mgHI%bz& zyB{(+m7E0qJZa(Hc53XY2Oi+`oMJwI?6bdceuMjKv2L=Wb%+&Pr3o3wMJTJmFc=Ka zv{+m(ly%kDofsZGQdiqbQQQM#zptBxMl;BKo7K4FP$1}w)P(OGd{v?yN`4Jfn6`G4Kj+@dm}9$2qzI4{6C`wO|E^QCmcNsM_c} z!w6Y)#V;5kFTpzH{)=%gA?=yWi`%xnkjbFa-IW6Q>O@RHAeW9^q6OB);Z-`>`L1WHzWef{CS zw%)__b?wl*4m0a|2ll~-ePB`5q1}-p#_VKFD}=*~nKQM3a9^i?Rx@le*?JnAPEDg{ zWdB+-VzYVuR@V+ARwe~94Gf66sR7kjrE|#N}jvy!;aQr zy?U{qMI>Kv#3;vAa8|T4JQ!~AB4uL|#8|-(nja@;7l}Dt^IY+jUPLjT;~3RIm1#RW zqxa$VdyAizOcbr<8aJ&)y#J|g4DC6LrqQhPis4#>YOt*-F&hwyvJqM6HiUE zR3?;b)KeRxv+RbNYQM+sv~8I_eXG}#kXE`4(mGc-T`dy>uBksU`)0q-YcV@_&YFQd zO|;{7n6zMR1YV~S?h33hY*lbka0656F=Q7Zpkc8S@)Y_1iu>~TsEXy^)7>+ZKmtkF10jTv1ri7| z$z)+CJApvhLS$!10tsY+>?_$>1O*iY1r<>Q1Q8SvaRE`eDu|-E;Q|QaiVK2QFUjP6 zdzs0EtH1Z&AMf*+PdKOR)TyejuI{et?$b@MAbIq{b7+U-Kn*}Vk# z_=*L4Tik;)Kr{=w{+_7Za$63uHwWqY*z=t`J^R@A`_{R1+V$|>PA>QCWo*QV<`%Sw z3@$gC#gX`*b6x;Si$IC@Cg94OA^s&0op1f-b8O0x^98{Y&@ZgnWcBCt0(rjW)t1BL zC2KsLKx6PWUoTjPx8a3hS1B$v-=>5bULq&XTVK`dtta@hm`(K0mJ=O1(6KGu@Xm{P zCkF4JjmhK-K>HK4!{jlnNNs+I*0&V1HJe#=`9saCt2S$tOx0at4?8imFns-i4dwpv zEnPi(0rQ}Gv0pmIW)OA4hZT($cVkg+=(CfL?zUd5lF~z6MOCCCw9k8XSbfHd(>ckh#uD-7Uh7RmmYr+0MgQm+JsL|C$KKsX0C-y zb)XiP&pvVy&bqt^TQvK2j_JaODpKnP_8+kMS1hpT)!WJNNJr4m6zzpv8@hF1R_mgh%ENOS>=JJi^4!!6Bqe zV45j(S#(i5SeX(%0PoGJl5z`NE>D@EbXk0fd0bkt? 
zbt2;fVm*8H#R-#7=C!!d->}(~|KQFYv44d1vo*cFxf6>Ag70?ka80dGCvVfs%1$0nVcfh_3j31|jJcIOCe zwBiEWQ(&*N&d~K0*ocrh_8-7*JqC>rGV}nAK^$AlZMiOZmRS9C+4?sC?OMAllDd6`4K1e?naP$=%PV@qvY@O(U zZy<}t@SM-Jrtsd-5I`-!2GmFejfw4OEYcq5GPd$~0?`hzr*hhKiS@bGvVVr#`dS-x zy5#pupe~|VeJh|?1rMIH)<)Q&&35SQ(l8s;;Ae-9bvdnT7#K!w~AI%hVQ@1!e0TmjN&^b$Z&Lz|zxslL}n^Aziz-y|3LwQX#kc^i&RN5cchqTIh-*J6N zTjlzY(D_%3Y*76zJ9NQSKGX9Sw8Y0C1`1z9jzRp7T0)?08=#>5dJXjO^>44EzP)>l z@LSb3a^VrsbzoXhd;P@p^&H$4LmD=zM82e~8Q<`HiLncznN*S6IYnGE63X)>q4QgK zzN)Ojyx&q#-fli({RnU|rXXm*Cx;!*(H<4{CTgdl3-EdEJX?5q zodmv{pfB61+1gn_|D-jPk4=ESglB2LSii903tQlD*!WCt3$hL4HZEJXd4b%&msk;jq#HTpJRbYePck_v6}-P~J~U8Vlj&@P3lRD}|OM zJRPTa@V4Nvb)ATjhoLPP@DDL^sM<`}9+k9fLow;{kMT3ULU8Ho?B?Fy_Ob5Q=(DKi zAHwR+eLDAv@@iKRdJXR0$J9OewQ{ae5j_j-XFwagj^8cHTl5|RE_;tT8N5{P>{Be) zIrK&7KSq=*d_~ZhjrM?-tc3ISlEdNc1=+DhX-Q~CFfT2%)Vd9Z3ixPL&?vK>5dPJd zfG;vs=mInYq9G?z)^bR|PXoT(Pz87g1>d2;PZPf6WJ4w39TogOoSeb`;apll#~lGW zr}MF(QwutK1-!SyN1F!h+klsFIB8V)A8@#K6m*7jImtaDPvuu%vPlom)4-)idLQ0i zAkv6Xk>>4$S)>gjU4V3?NMk$|&q0gNz2be01R%X1X>XARb43pUF`gqTL3$&eCyO)& zbCF(;^k|WG6X_+8(gM%ffS8FtKaS_v(FbV{kxoXMd&&mxDdYJl`}5u+4ISv#i|3vq zE#z_XLFqsqXwTC60hw)k4acu&Ba;RUwhq?sRecNGF zUR-+43pQTX!H#R-(o5X3AG!46WOM=DB5}KOsIU#8Ti6EIp{5tQJq)Rg98M|)e706g z($L0~Aa{t0#O}2Yvz>plJSgX)%yeL8cuaOcK#23JbZT7Of%!9@j*Xo*EhhT0iQ$DtuaM3i zUC6SLX-Y}v3vwC_ zx=%^Et$&eqz`L{V-dXqWth<7a;8QF3+*y}JWG?DDB6HBcvu?*j4(xXiIqSvqJL~o~ z=#59y27bN=-d}}uyh!_q^iI4VsQ(@LKzj}gpMNtNSa;SjwPzQo1Ouz>^dHoy|N4dp zBOd>_#NK}PPg?N+zkdDv{EBD%ADsH*@c9nb#HIaDr~W|pIDBJ{Z|oqeS>*7}`Y$w~ zzX(f#29RTmfk&S&@bVP(A0-dy>_V=sXYJYx5wuPND^>qGX!|*&d0UbzX_d;PwUZ@*}nU>}fszSc>!Odf#m6EzYx$?Zba$UEN_?H7Mrc~tW? 
zH}STp?|ig`*LVK5%M8dSv3U1=6H zFOkOEmVvxpN-gnzQfkQpEpf>`gqC>ClUlOe7@Y%Yn!mXe5eo=YO@ z3i|!7LK4xk3XMyT!lH4Fr$ZMw3jsEMN$5hI5`gq&krvOL`hW+X!&WR>H^R$R;OYMZ zIPkd6-VX;v^H(lH1fNg90oJcyx-8lYUlxm=${2+ zJHdJ-K22`DMP@4eZw&hszJVtH))Q@av!IR|ps**j{zS$B{wH!7Vc2TNp75PP(SFHU zXU87TXXO;_qlVY**yF9dW^hS1I1jaBC$;`g_(-ReoJ%NL7n<}7w;-Vxr*R595Haa0 z^ip@|2WtrBTl{$gO6*QO@t#cRiDttg;qATTw$08hySa;9wziy1#4KWk9s8smd!lBK zQ=OCTw6j`ok|Z0pQ%5^?)~`->+Wqa=xvl5P6dSgIdl)j`x$nDiY;F(s7CR8_JzDJl z%D1iM4&W`fL#~flpA#`=fNFCybsuPa{B0Gea(_;?xm-)=?Esf;G`A*$vYJ?JKn&{%m-2$=!7d=uUz)$g!M`6@vVOf>*KVaH@soT9{(4& zCy`TOeG)sV^?PA`GN-@Tv6FuIgJW|&3hU$8+`6ij^*QL%;H2IPAahpq7NsDyDtcj2 zN707^UZT5OXjS-yhGl32xF5*j*IVJ8Y5T#ANBXwNI#c(kD7Djob3ZbG`;nZ!9_u@S zzr?y-ME15`18oLsDEx8jV@u3lm)90Wg`_>ZO8M2t;13uNj+YH{z zaCu%7Z3f4`?#yiXD7C*xp2EK%+6>Vv(d27rGhj!YI(ly9?@eg^mCQwHUKiAPn<23$ ze97AkPW!ks{4Iq&9$o?ON4PA{h&DrFkN=&w8A5XBI6HPy>%Ykag*}z`$%=N;RbIwI zFHUn6HZQfcWEt0+qkcr2!Fiz6%4CZnwZwbRQ%>Cb`BZ3Qw$>oSNNH_4@Lbhg+=VwJ#EL&)SVU z=<7k}5a9hpFK1NnM+KbsaxqTF?D!?RYb1f=1K!WD9B_$WqC48kX9a<;ZxFitN$Ijk z=o0vP+2_JXr!Hu=w1-I6xgD6acp7D zqIOAqu1kqOSF0Dgl+U;*HQPi%r*6sQR{dabJUW<6`f^PI=^nHl z7rC^2L5fd>!# zy?`I!_~hw!e7T%b9`5^b`K3G@KhP0h`%2K^_&6UObQHe!rGOs>9DBZ<4o&k*g732xf?ayBg$9b%LQHfeh(x0xk zzW3q-5ByK3hO+^iYil<*f7y0!c!Twt!^z>a^*lIF(I2p#{EBaFy5q~5J5GN6FV0hb zeewKx@_LbN2fiZfAEe`?<#GeQ<&&&`=q7J|`|ZuQ5AbdH7Ff1LMUMQpwIJBfKRs&L zV!6xR|2NL0wFP;GHE5mM=5=|Vu_U>|dW*i++;sHlF}iOv`Djba1Dji>(|wp{K)vCX z@;C34S^uIhHsAB|%g4yY%_R6C>&VU4{^TN`o4`9Kz^5Ddi1mipnE>;hO2uH;(_!5a zaku{Z;-NzhtB=SSaX$BEz5=Ng^;1^_w*|DYgLb4O=j4G8o+P&aoK`Usb`G`O?|0YcSx`*+lqLFOt6Qu3~ z>tSj=@*$~x>}$&QF-p5^W=<{s^z-JwHnW!Bs5=4)qN(v3PA6W_!AYf@maqzT@MZG& ztJddx<2#dmuaYNU`0AL;X12fi*M$q2d-K%I)}@#WAmW@_%&fG@A_?%43L|X)Bc$kp z^_2CU&q&eE=d4AalUNe-skN9sx*1#2zDY;4G;PMhfP|J~6rHo&{~a?Q|3GcWy=2;F z){EA!KPJ<59ky=#6#HZae~hi@FK;F%t@-$zZ}etsKeEl5fx!WkDtb$P@*c(%))KXX>ek(UU97Ss_}1WC?I8SzRaoVSl*2z}PkUkCjSHkd8BQjXW8?$!1MN+t z=s221Yw0aEnC)a=>!#>3benb0=+5bW)jR8b^y@I%8Dm&!_yFHFRXH7Wy6D{Bd5rUP 
z=VzQRbqMLu*x^D)(lM{&?v5XK{K>_`#psgZQt9%E%lEE>UAMX(>!k0rxKnv&=gvzy zS9Lz_7UH(m?R)na_hM{0J=o*>E)iX>be+<5b=R-DHg_xPcBXrD_s!j}_2}ATc8|q9 zj`!@{v!v(7p3n7i>y^{1uGi*XPxLy_>$P5=dNNOU&%T~Po{^r3o)*ta&rP2DJzw&C z%k!pZb8lVmgx;%rZ|(g|?_c`F^qJ9TNuT;Y>-+5Q^Io48FAuM9uSTzjy$*Vv_4>)H zrLSw>K7E7xzT^Fz_X+Rw-aqy0-0x8zSD!GSYM;k_F7z+NiLkc@bR95gK>CfT;oV0+s|k9dIPzWWd>g3jx;xZUk5Zdj|#th6auf z93MC%aA9CxU`61{z@G#EGte-wd|=bS`vyKTaL>TQ1792X?!Zq6ULB+javjubkRNuE z898X&p!I{c4%!u@5Aq1=8#E{=JSaM7V$jT>%%J5#)j?~5HU&K%^i0svpf`fv3%U^W zO>kLoL-4)94+lRL{9N!W!EXhh556>b%;52ZQwJ{?ymWBM;JU%<25%mGWk~9f1w)n& zDH(ET$cZ6mhI}&Q%8;Lj{1u`LnG!NLBs-)yq%P#1kcUEchP)N>VaQh@KZM*Gstt7+ z+GA+{p@WC68@gra6UGk4Zblzturb0IW1M81Y0NM#GafX)VtmW^q46u@_r{y1P}68r zlIc;?9@Amd3DarQ$EM4s>!!a#b)jydeL@3p%+}b@l+g6h$3pjoeiHgKz8;+)RuZ-* z?6I(S!bE3<_MRF;Sn(rQzLRC-iY`# z;zp!CvVUY{1N|7nEhh>Q_Gj_flsZ{&NUd`C?kbfmf8xf8Cnx?oDQZ&Vq>oYtro23P z=H&BJ22PnirD2M7YU$M5)9#u+bo!0dd8sF6&>7=q6wO#YozZb-r;%8^JC{vo1Z>^%lz*aJZSN-tg-yO@UDd)rjJYCk^b+D)Qnvje`b!zeEJ`M zM>5~f{4%q7k#SMY;=YTk7hlO5m~|%m-6cDhCM|to={rlWw_sgG=zcl~u{42}M%a$x_SoYAeUzR&9@3%ZDl9rw^hI${aa8f#;*G@zOWaC&mh>wLE{Q0KDVbO@vm~=* zc}aE2nv#trkCp5#d7lCveBm0T_PrQ~+0p|ne>cj=(g@Y1oR6G~^4T1sG9HcNMJzlo2?5DDSm(%i2 z<(}pK<)-qH<%#91%I`0KwEXGvBjqQ{&z4^(zgB*uyrsgqqH9IJilB;!is*{uinNM_ z6}c5<6^#|^D;}Ta&zSqmHR7?RK8ky zs`6~*$CY1IUaS1M@{dYum7%J0RgWs~s-UWfs_3fZsBgRn=8ZRrgeFs@h() zx9UjMYgK2eKCQZ1J*PUOdRcW@b#3*!>MhmVt9Mr)s6JZ#M)jHM^VMHeU#JHYuQulVYuGYR)4DgT>a(xU+bG2oEy3|_%sY|h-!#yNNJeeu&AM+p}JvB z!^Vau8=h}C-f+6%lZGn|zck!#bZYF{*sn3DF`_ZLabn|LjTw#kjWvz;Ha^_Av+-c# z%Z+a}o^QO=_*3JbO}ZxcCa9QXudhXLsVIFcl_KZ5N&C?F!EVyKxWR{41K+@o?6=>VEXYq6`{EID!A@^&iwGXi` z(rL_5?#KC(&uHr~D)20JEI0&OMx25Zr)|fsIqz$!pf*8!30#)JKiNd;$w+umUc?=v zXhxh8;Dg!e6s#Ig(sqN&F|2Jnh&i4IwB^`OYdgHCY^_FHOsCj8m*Q~_&{zf${M z1RTPY02%i1iP{!j@o7^?k6dWm-Jur!86?WKPt| zpuJ-4Nvw=JialJO)n;NQu?+L_&tPS399NZ)rbX!9oRdgFEDpttzNM9 z6lTDx@e9S=)^6yz6#LoLf#M5z&jT9ViTg-x4X!)2Ea>EE*g+l2a2W7+;%+%sA$^Z> z2+-;=_plSzG#}S$Sa%>SZ#m>mMJ<>N+j|G2CF!Vfy>TB8noB@!BEAM$jyE==ew1L< 
zYBPDAj>Nfyhmd|oSR>Ebeyp?{g74`Ikf&U&2y)B;e={V%U%LX%0a#ys2-wfV_BTS- z7~ohyI}=zA#kTaRJnz7}H`M^WPYK(9cwA^Yqtbuwyr8cKH*>}W0Q z>?o}E7_8!Dw8D8JRRKr}-sJ5DucHg0pM9`ePVX?_J3#9mSkeyI%Tj149`v>etL3fw zdep-_q+WmqdHw$oJmO$I2e75_Mrd&gX7JVsD%{R_t?G^Tawgv3+8>4bwI4Ob1OEe4 z{PFhw2%gVEJAY6-V3pu&&(jd`Vlz*h1CYPeQ^-st2 z-`;b+4u7pTT7P#;LrUI`ofcAXN%)`Edc9qmpYVC--&&OQKmCnD4s3r_(EXFh#ZKrs zOXP#+iT@pjhPj@2P28__X+4i$OY3cv{`W#J{2g8|d9Jxur4CZKAK<*O3duk4(*NVx@kObGujX|&Z{^O=oXAD&K3Nl8R$ zc4}>rJe(KOU>}VM$(z|W%@7rHvN+R&In_y19QU%%y+YO?0)t*JAkw8-ezan z2kc{=ziudIPE&LXb@{qd-AdhA-6y)sx?6gpch`6ICf;Mbo4kMX{@wepeiMCMe7gI1 z`}q0<_!tq@5BC}A6X#RpQ{(fb&n};R{q=nRkp8~?1Nw*bpXW<`4Zd!^U3|Oy_V)Gm z4fY-9yTCWo|D8Wxx3-E=e+_x?LBBK{ZP;vBc_n(K`^k3lEb{Ol+Dql(6!P#X@}SxC z(1<+T%N}4)u!A^({2g`-bXsO-3tCDIAP z(oTTT7x=q|zn`_6Fuem5boihAM-L%|zqWt!89hcn#*?2VoIww468=i?w*`NgEzrS} z(2d65RF1>{*UggHx*57zx-1>?c;`PI*Xe)#(~bWxkPSd!JxkG7PlZ>t1?@@^dejHe zbIeCCz5w;xg7&!)?OqewzlYFb-;LI0o3@C|f=_ZUdeTQ|A^OZ|+H};VRJ5=UV^z`p zS}a+jt)@lV17sfNaaO=aW5{f*tlp|+kVU9>aU=&GOFY_*d1y@@MGw9U-%9S(#-Z&w zjF$a5^zd(>{eKNL_C@rbpP-%p2tD`5=)phNenemVz4n9l2l}STglH{VD=}c>8|>gr zI>N8(Ogu;zj4JkpXXc|N!~ZrDBMHSQV;I&)N0D(PjwFyoGEJLAQne=t#reoq?McG4 zJ;YhtM_gzzEzzDP9kgeNtG1tX(vFbs=waNnL!_&Al=RdN5_jz-;;9`Yy=W;d(_SXM zwO5Ik_6kle{TK1m-Xy--3F57tA_KJ7NiNAF`Py5=UwfMbpznK!1R`=6q@5-MwRcI7 zb{5|Ldt|V74!-02WC$(SULZX%E*hzwCnoJP5`mavg7y^|p5UyH3VyzmR0@cQRS~H<=DU=RYJ3 zUfE3THn|J)-=AV``6t9mPZ7Pgn>cA-kYVtaGRS@lM4C~{g{42 zDoGXnie99bNHx7oza}f_6?&D_&~HdBy+-QjxAZ$wProM(q>=tWf22Rr>-1+FhWac0 zjoeFrr#H!ZdV_2r_o2mlfNVre^dP-OHj^zZh~6gKXft_)woof=MdSS_eC_RwvS7x@ z<4i|(Fg-JnCzum?lI$c;F=y=0ybJAfN9Mv@Str(+xslz>oq4b>fSqR{Y$&-zE|agx6=q~6@;kZ7K4TZyhwKA*dw-CBlRw!<>|^p5`44*+{X-}V zW5?MEa+@@>PuQoVg;?2Z>?HaajS@;JqdI10;p}zx28+N6&X?KO>`iux>ZyVKi@imi zs59-r&e4w4g}uk#XHjez8%|wmCw7rtqMfN5`<#7&p6DI+CHsoHvv?NA5?CS|$CB81 z>Os4()9ehJ&lb?Gv>UUqh3pEuO1rak_6^Hmnc6KfRl7k_w3}p#_7|Cf);616qb7LL zxmq3#g?GISp8N*HD48@&OJ}dr2pX+bA0@3#2Xt6Wr@=q?fcLOE-e2=JPTmuY`X5sF 
z6#LP?qwX1ct54Ov4(ltrsC&KEn?$L5tciwHl8%!$o|LHj4w@@Dq3&Ib{dUhZ@vfQzSC)IuDHre3wyVA3WrFfpp;pt_g{mVjY=z}QA zsF@HS_@Ir;f=B6twlW{BVm_YcBQ+8K=ZU+?crPDqXeRtI{$@1VQXbjl173*qKtZtx z915Wc3{Zndw!kO`y%4;W56yFEK7NZ)4w=}8lT$Ck8$$(k8{b?3&&SvBe=p5cyt^3u zbI^hpAi@bjF8FT*+T0ZU5^?AFrY#pk0CjxR5yvsF?XhOy9iGQ*A(f8b~YX8n)Jik*Xp`r}9O>qm5AOoKc&drbm$i+XdCpu{znV;gU4U4neT z#ll~^e+(WHYK5QX4}U)pmO2RI3w#V}2&5Y-YCtGT)r@)&0gpcl-t%x+&`1$U#G@}y zMBkkx`l)2ph)J+8K0Yv2cm}D^MVfXOqOe);Hs-*S=nBugJ6egJSS{d*2(1ss7k<-j zXuoTZz#F)!-9q2K0baoYL}Wi`*U_I*tR!Tb6MT^-_$N=HC)1(7dIdX8JfZo*hj2q% z)EDC+d$rdQQ^jcR!iVUHDB}nsnK#fL9Y(K=v(q%R0N8zFEIf||+N)o#2nSVe1}uv@vg?@9%{_q_Z!9NH@{JvQGkPIY);5*z0-{Dy@ z7^93KWTc=@O$AQUW9K{iBYLbqzX2_2cGTI+9#x%@EEU_)M2z2UI}R;E6FOdnyevr!{=Fx z@z{GXa<`uQB-&$bUJ3U}$V22|vXyMpwvtDrH-boU2YEvJA7mHVP4-}1cQ4sTp221W z&mxLENS?F#9@6u$`5oj{a-5tXuaT4Fb@B#zlbj;|B5#qm$vfmUIU{`z>1~h?$$9b- z`51>Deo8(g7s%)23-TrTid?jLAml3f2IJ1(lJCg(r7M)G!(7ALT#aa_;p$lm`&7hfd5nYUx1KD&5T}pFkF4k1#(`9ryEue+8i2L-k zRNF_(XgRH*m9&ah!*8y^N`pFDPa9|>ZNjdBtLSRFhTcur(slG6dM{m1H_-d&{qzC4 zk#3?7(#>=WeTY6xx6*C&5&9^7jBclo(;f5)`Xt>+pQ5|yZn}p)P507$^clLJK8twp zAbpNLPY=<<^ay={9;G-+n!bb)hF9pT^f*01U!y1K>+}u!COw5whqn;3ze7*cGxS}0 zmcB>dr|0Mg^h0`Hdf$lWKcg4u=kyDVP6#iY`{3LIr{BtO`B~#Hbyuo#s_fd88a{^c+~B@Y3WO|uB;pD&V6Rq3;uF%*2m^4GasC! 
z=Zn!1KjzN@SRfn72C*Rc>x1Dt!(-;2vhaktr^~%u;l;8MY$O}SMzb+&EQ@9_EEc|N ze4F>mCa`2SkxgPLY%-g|rm|^lI!k3USQ@*F&1AFKY&M6@W%IaqD?HnD?$NSEY%$AX z*=z}0%5qpP%VYU$8C%W@SRpH7#jJ#tvNBfADp)0}V%2N~t6{aQj@7dU*2tRJO16rv zW@|9gvlind_h6)CJ=?(U!>G>#Y$Mym9%P%@7WNQ(m~Ca-*dy#w_88la5uqIzPk9on zRi0wI*lxCmJ{T8g@R&fv z0Py{}-!D9R?z!_Ag8TjFFw*oP;t#|YJdO}C2lxEp_47zT#0~K5;niPb-?H!6_v{DP zrHczPGxKsRc^TR1`oi?AQj0DpyTD>tmRXotWHIDs=M@)a8VWPh^Ybzc(F+SQOER6J zb1mrw`FT#!`HS=OGM9FYF38SXY)J>AQ*^wzy2Peu7o->GF3QO)GsLA^fQ?H*zNN?z zFK8Iz1+Zg$Mt+ecJv}q8$SGb@W$}57o#G{Bm&7(Qu8HaSxw#gJtQ)t`Qs6SK?WrM2 zik75AOOgZ(Nk!Q?8JRBQ+b~@xIM6X93vms}LK@d(hex_(h-R21pHGs{CpkPfqzI1s zl&t)MyiO@u#ZW;(ac+*KxX39*O5u`Hm}4o-k|d_8i=;Hwk+0 zM#X|raXUuED!aw1yo*J77t8W4m1IlX$aW~Tmv^Zw@A9^XhH_cn72>sy750*G#@)iq zocvN32cc=wc$&RLpOc@rxX`IMFWVRz7Ada@VI2e^ATR77Xo))^6Tj1C<>xOIsA0wg zeNld1eqkq@SR&QIl9ykUnUk4q(I;9^#r4y(c+-Kd=EN{%okN0^d7 zOz{a*a)l{=s(j61ieH%G6Q=ltDL!F}Pne=_R&>pZo>|c|EBVceo>|FnR&>pZu36DF zE4pSSzgf`_SM);SoDoXS2t_|a z(T`B{Bb1yGO3nx+XN2M#spO1Q{2~>KT72* zO64m`@rzRYq7=U<#V<8 z_|XbKTJeomdW}~6qZR)cg&(8vW7P8)+jFIt7{xb6@r_maj8*T)D*CbN{Wx`vSMtRx z`4SYp1gYn6qkJB&>T|eJ(hXPjI@~Db3OCApgc~KFaHFIjZj^F{8ztRvqm8bVH{2-Y z4L3?Xha07y!%c!OcG$FCg?vbha>P~0fvdUS1O9B~f0+$xD*FjF*=Q@{%ks6Xj)+yrjs>6nU8{FVo~@y1b;y%M5u* zlb5^XWv0B$5|>UQKQ=AM+}Shh441mF5UxW9yDQcq!%~=S$uG;cNL;&4UDC@7vU75> z(~GjxU2XTAu3@rZq@NIC$tiM6%q+-7Ft9MEP(Ve~XpEL7Y*Hp}QYLN+l~o4|qFwSV z%km403i6j_Wr}w~Rd*I@lGP*BB&$cLNmdV2M5rrwBXDhFWQquLkzs;>ImZ`eiJK1G z9Ts;k(V}S8y-caLr}8TC-)T-(&;!h;*me^vn!s$s$rN@ea&n zU?bq}@pmxm9N&(COOnHTNe)6L-T7Wp`}Zcb$?THiKqJM0M#>#DTvFOz>YUP^UaA9) zR0kTVchKmZ+Wx(nZMd!~*pmv80id{dRneb(5Zm^KyF=`!9tyu!-pgQ6Qcw|}d?4dO zaqrd^6)HsO?kdDKYVHp4p?sJqs*AYya10m)#N8oal;|=%6!)$wbW{)8=(ws_Qao@= zvb)J5?d}j+3V?@WcqwVg7*jrVj4;*Hw(NA^A*Y1OxKqC47*Gk7W!h?mZkMQ1!Hps*4Os1>Akw9oEumTDwXz&EYi}u}X3>3KjRRDo#}o+H~7d z1grwwrCnsJUUMjScZWDuJau=7T?L@?%=T6+16lFNtu2}rX_r}TLU)Yj(S(RZT;pXJ zAQEVOB(y^k?>Kne0%d3Okq?r>^iVcg+%7!Vnk#T%n3ysB$W$gJd*^$)3&}rs#($=QK>wGppxjMaQh1Q?tSkSMYEJ 
z54XXUa~!Um*Kp;uhAX+l<2z)Qr6YJnk1RB3N|2mFr8{hjmlEJAa%75^?j){)qbb1_ zMkzPftlUv^xRN+rX(m#oBbB6)O0q~L*)S#9FqQLYX5eBvtdiK~zUSCLO#MLtdOF`_(iRs3TV{}{zzhIx3d_{S*zF^azo`|w=x zk5T+%6n~q&%kU3)ihqpaFYV0~FYOIi#b1VrNUQwED*myGzYH7kT;)Gj@sCyfV-^2c zmH$|k|5(Le+OsKM+B2>y|I(h3R{59qjI`n}?HOs6e;KYKt@um(Mq2Tgc8#>+FYOv> zm46xLn&M@ci>u1Nv~#3Y{-vEGt@1DJ9BGw*Y3E3*{7XAWTIFBbInpZs(#}os(#~;J z`ImN$w90RS;+LTEEA1T5MSeq#wsvHg;A=Dw6Ztlpr5)p{>Wj2Pq-A|DOFJ`~rJdm_ z_~0t|7|pW$a8>nG%7OZ%o)1&_!&F+@72a3%SK6P^9Hsb0DLN5CzmU^*Rq_oJdI4O$ zAEojWrSc=~3-Tzqv?rw1d(u9Uw&g>$ld@hxZiOc!X=At?RtzJt8Y!l{5S7L6p5i_Qbiy78cm^FP8^Q=!`-n0&Lt-CO5 z%I8~mW9IcK%%Rp}26eBPL%k0(sC)+YdCbLPbuH#p+D-O41aZh05$Z{EY| z*>tf+A_waue#AsV`3jkPwvV@=B+SP$BO6`=-gC2_`@ln1aLr8m}QcwueHPOK|2 zW6dj8k6>l%a57FiPR3*9$62iVn2Z%D)3AE>JXU+m)IPyX`U;!}Q;T(|pToikR?Sez w*G>HIr_oXjC~?EfF0$lcbqu}%z%wQ+u`|8}&?w)%0ju?3nb^{jRg4g$b|GS^xnX|LA)938Wnb}#Tl~Ro% zex(NI<_((De%e<`nZ8QtUj~mFId;yaJ2xrS_lQyteKvUP_?%mgJsGc5gQ-e|{XTMR z_w?&KPPhiQTk&Q3*m2q8cC}ewpX)VT=gwL(Sj%)=slwEqUKiH~YGZ(+%@{*PU}P^lIN zm8!qCuxQqd4e#7FN2x#w@!7p_#}~efX<7c-3?M8@W_E7*tW1${bQxs&I9Y3bR+9!)IM7 z<$Ej`!2d^rS-(~Ji|e?tPur(V%LmSQW^apgqWb=40c_Zj<~eb}cDWB5%H zW*8B;&QJxT^F6OKl9YPI-Z%Bk z8yHDW$m${Z37{#kq1*T;g9dOu^N#s|Cq6RkRhTI?+pyIt$~y%)iXb<`wg* z`L}t)ylwttJ~Q8%?^GjVoU3}Mj)W11FItC5`ZTH4dN4j%GlM&kwzK(kbTXto*z9iq;Lo}*?$XRC$KB6SV)Z)z3v zR&_gcHK$f<)Qix6sdu68t52Yxs_oF7YBzMR`Vsn*wz0J-FuDQtOzTFPnCPauDKu8M zhPKh2pj~uVXgVp=x`*xo?WIqLo}v3eb2M?&XOdE_hw0(ak$NO_w4Mt+S1*7r)Qh0! z>E+N%^p()7H8IyW>KmcA=v$z-X=1L|=)0hQ*AGG;)(=A;)sI1+&`&_0(nq1k^f6`3 zjfQ8=(IwBTB!Q%QvQ!Q(V3Y^(WckspbtRMTvv%= zKa$!r-1B_5TK-NTjAD7Fh?pd)be>F8sVdXMov!lnC12&K(@*Ko82p(_91BTFvByZg&94-mvbo%yxD|2EC4L2J4%{Nit?u|9%GXYmNn16E9AAuFy;|L({;sOjF&)s8 z^&I_>-lu;t+MI7Lqwc-yi}c0%ru%O7-RJwK?czIDE@eLH*y{HOcp`d{(C@BiGt z#lI)4W!S8+qOiY(y%+YMuuWm*VO3#A17U&2fx&?>folV|2ksA)1$Kppg*OgQ2=5r4 z8D1Rze0W9p!SECH>eY*`H@@ELdJolmrry8neOT{{dRy!5t@l%eiD(otIpW%g*CHE4 z=0%Q@Q()1Hh8_E zzhUEs2@QueytUzd4WDTEa>Mr;1se5kl-FofqbZH%G`g$NBaNPG^hQ+usOC}0QQe|? 
zMGcI~k9s@m)2Oc-k83==@e7TQHwkajw8_;?ZfWxOCOaMuoe_Iu?3&nr#BPn<)4X-_-p%uxk7_=p`P}9YH2)>8Yh16mYvXQWfxeTJ3K2W2+O1;fa$I=Omt&cz@!a#Gev_t?RdL z-g;{5TU!6U_2aEyZ2ea2gRM^_g(o#jDoWbWrcaxT+q|D#FF7Z9X!45WtCJr~eldAV z+o-m=ZEtS-W!tj06>Sf;8`18`cHg$!*Y4-`Y3v~1k(r)d$&F%I~x4kKCQP}uzLB~%bz@rRv>s_WY58fB)8?ctN;^MoMcUWt4b!`&_fDUlep~u| z>6ICsGSV{oW(>}FFypt(jLd$SXJ%fJd2{C5nZIYHWcA28JF6gTVb*O~rCHzh$mnr; zkD?wA_1M_6Sa)7fkA04w zo^txVef@pM^nIvr`5C93G3|`I&iJ8UQopYKmh^kQ-~0VO?e|Up#QvT7XZG*ge{lcX z`rqIGiT?lU|4#o;2ecW`bwJMnIRi!vIBUR+0rLmEF<|Y0jRUr4M`g!n&&n>!{(JU6 zvv*{FpA(+bG$%1ZtPe%Scs{g3vqwXEGXLR!DE~7I?pD}vS=#ir*jh-?3+|f%$ zUo7W6qc@D+J-TZ2FQbEF>WyhLX26(5V;&r{eQcw#XN+Av_Tab%<2sJJb=t;P=+ zKY4t~_?yPB9{=3k~0G@Q_5!rTdWPN z>8nYH&l+{slV@$6+;DRC%^{lDioSk#_^=H3w_J(O@TKcpLroDYm z+Br9z^Yiqn)7Q?(n{oS$pJxu5`R2?cvxd)lY}T{0zMFM)cKYm++5eb*sGx1ZxPsLM zWpi51xoXZEbGFS5pF3ynopV2!yMJD{dDG@SGw;Xw7oU5^x#ypI|GA%@``rSypuvK; z1??8}T5#5aYZu(R;ME1|7i=$#Dr{TWvv5k`tilC_OA0S4yrS@-!Y2w}D15E(y~1^c z8ww9CY`idTVba1r3#Tt!vGA#dA1&Oq$hWA+qUnpSTlC_hvZDG$9g1>`mKQx%^i|Oh ziyJQPvUu9!YZt$;c*}YEyo~ckoOi`}&z<-0;wHr_i$6xnYY?f{(I4^U4jMjAJvy&o zX0dv-a7M`@`e7F<|G?~VNow|pk;9YJ@ciMqNh)*9$Pr13-aGwo+qa+(NpG(nd?fF6 zMi#Z~!%tx!IE8&!EqmL8cKi?d&KO8Ss8=0w=uX2~vlcE^4+~vAdr{FswQ5fBj9KdH z!ufM&sFjRF3e|FUbpL_i=qf|E7P}K|>eX7~?a3Gt6oU_qs3i88$m*xG>KFWlJh8Q> z1NL@=L|>aeicOtTG1zMpWt{xf)HZwHxo5{(+O#ls1VfBXTY17sb%4}zS06VvpF?rf zDiB;RZYNgT8OARS2%)iQVw##}CfdZ9Skv6ZnL%c-#}%m~td=I;B$!qv(X=*6rj1E9 zLyR5I7$+o2Yfb75@inTadR0>B#4HkP@+}ltH=BP=1kHJT*=2fQEdeX3>XF zkP(J?od;3^N@NHdb*h`HQDi{2Ygqah;ve*_fhILEWh4c&3#kNeO}FdroYyE zbY*pX3_6EKj6pY=ugurx8?(ujnayU4*=n}gQ70ptP^yjE#~3gj*jP7)MoYSFs%Csz>ObGD5fUu8%vL z$^rNi&4|IK*ZQa#oo|45?j~bz+mv-_+o5NiFE&pSb9>kvv`<7!=+^Xyui*L(;uTPR z887r!EA$L~slJu=9dCM~3HaD-^Z9-Cd<}d}eI0#SzFz*8{rZjJ^jib?{CL=CD%XGf3CmSzsmoB{|Wyy{+Ipl_&;U*vem!SU&RQf zUD$KP{#|1KMcARlmBju~QnRGEq=cj-iG6R0eM!X+3&ZPL@t zPAXTd&+7U(div3SX7)pzjw-#P1JKtCIXFl#0aD 
zKZMuk5dX#gm6+FB_;2v9Ql~up%l~-Gf3DZgKi#?VPxTk#U$MW$e?AQ&+l*vvJHhPB@F8ml$P)_@b(;NKJBt+U#RtHnZ8n=n>$sh`a$hi-=jI&^-fGt%b*|tYcP+Zyn~_(y z(sQ~E3A~E8&uaQ#cT(dYKnHaJi?F53AGEW60}ARcG~t>ZJaO z=Hy@K3SXoj@eEp`PCopL$1~q25NH_%1T^JvC5$h(2d6 zJ%W$bM{2NIhlXc^8mj(-p65$7R(*{oWQ&@twyCqwolRBWp?TaX@65B+c620r(X8x2 zQ*s#X%8zJU4xm%{3GK>3G%JVDtsGIy)$iz0R%oMEYQMTv`_wMp%QvXeYNML2%F)AA zn!RSHsWji4eP)+=&%BS6T4z2rYt6ez?oZ4I=40Jl$LapMvF?fPvA6D{PuH0`OZU~S zbt~OTr}7SIh~_d%$Dp%ppemucuM(O2rZdY+z-R&p`g z$P&Gj-sT1RLVXqb$ZPdA`fvI=eG^*A8_-6giPXht*^%eZ3x_@6p-xS#!`>4%GMQCc02J(~ESpF4DRBex0Wu(1Y}YdNA6fA^IO^Ngt+f z`-mQ{AJZfB<2qme6K&}edX#<=ed$wrj2^3>(c|>9`e{90Kc^??=k-MWf}W&b)Mx3J z&_n%8Pth;ysrnUtwtiJl)Bo1z=-2dg{W|ZeH|ROPsb}f8(BUpb_P?zQ^gC#--bF9_ zD4No@^n3avy;iT&>-A^)OI@lr(ihpJH|wqXTm7Bhp?A^;-)(y*dcQuPtMoyANFUM1 z%s`W?&R56K68?&|?5Mh!m+NA4o+&hoOp#e=t}s`carEVHG>@4(%-_wu`hER@{!o9c zKhdA+|LD)@*?*AJtzVe_m0e%)jUf{RQ}5S78?B z8I^CIQduX(+uZvPI(=)d$6SQ@S8x{Y-cUXMfHLgMRC|!GyTCu{nD7z0OYi{r-r8e+ z3x3sEiaHoPVs22atFesdEOP5$7}vv91E2Nx{Zrr-;`A5cds_8m_8}kHmCrL-=0Qv^ zE?4c$$#AvGHD{~lW{ye_)AFCFA%7VUsRUoD>QEQNqp4YgUv+VwI?Z@-x=J#C0lsTg zl7%Ty199K&q$t3CsOn<=fob_{`2U6+)m7i8nwWXGuZyLGwKVvU>-8Qots)lV}E`+|xV6W%OTXZ&UK zQ!RX%s&!qMOciVTsg{30Gu6>}aX>Zky#%&WCI#R|)kM!%U339yvU1{D>a~>tt?Qr* zJ^w4Qt0W9hoA(E3i&xWzk8m%9``|6URV}6c*>-N*nKf-2vhu~Yg9WO-h3{hS+4}sj z$}m5xd>`*iW=^ea$Tt_jmtdBw=Ke_4(qA9mwzD-V-Q2CZx$@zJ$b(;0bKg$x|C2Vi z8~^{!^=-5}8}>hWwzV7Aw{U|D^Yy}w4et@I4+9&o>nV>l;7#K4Ja|}*v0+F$ZTwtW zDzeh%!wag%AMzk1Q{41PI(b&of$XqtS=!`y{2NT&d9yBpCrkqMvV=1Av@hQz6=Ryy zzKggQ!s+l9Q&bDmBY>9bVqUv;OR5BBo4;eDJ0 z=JQVLg1H2_+yQeD=3Eu;8-Tz6zr)?d$jsswm8R*(>#OLwSJD5kqW|xj%+&X#Zg z!EVM7A^+#I24g;Xcq8e%k>_So&u20gaqpAg{|1kvNA09P1h0da7&ZN-IuY+wGZ%R` zm-0PZwFABM-x*WwCJt{SI|r#u^BHBEi<}!qKY@8>3+icCeKX^lql`v=SA+Ejsu%0ceasd3eS&&&9d+_m+Tv(6O5T;PlTIt6EpFsl(o1{lV6LIQ<{?j$ zY4^S0+p6CB2yNmBbt|5F6;GYcRK3Zw9NJV4@1uVD9o5S>TV;}_CgvXEd6|UYi9S#a z{fW-Bxq-gMsXXUQ_(g&v5TJ zc~C@sD#ARB`6;HwDwVCfVm=EV25IDF7U8a;PvewH7n}FW5CTV#a(1}dl$A%Um{Ol0 
zmp(>6@H8ue{EQ@@QLA*AYEORt#t3;fbME2Phf2}Hc2osAQrBm!x{jLusU7c8*E*{s zjQSeuCiDlI>SolSXtiF)sQ)nTKBhirG|0#i*}#|($reuyOkl*As9Q6NWxat;(rt9I znxoqi&o6X4#=LV?scx@3s9W@DGLFw+%$}*TbVtUSomCIstQG1wW6EyGl%9+$Q^|u~ zYNJkPoIg)}%{bGJOnc~_lt>@On=)EOC(>8#*L~IN`V3`P%06*ZnMJ1UUpf$`XC1I*$?LFtigx)ltTgBXmCP ze54-53XgA8i5|^3?E10PSleamzJ?K_f>dW-y=q27o zdvc$?f-$uniC@E5+>XSrXB>Vb<8eFYzJ)RPZSENSP8plu&Di^1#@+WZ&VGO~_Ct)X zA7*ULdLc&E|72ACBqQpl8BagUNcwq3(JwNB{uiU?R~R|}n^E)YjF{hKwEQ-sw3nnpD}v$4w=izrCL`zVGIlO!^t?;P z&wCj`?~^g~_l%-{U>yA;Bk7;?Vg0lIg%R%&#=OT)I)8MsnWvM@*PLv2rY-ud_ND`R zu8yXY>1?`~uBMwwG2NM6OEc*v!(^H)(*tdAFVT;kZu*)tn0xDQ2B7`Rv7>yGXJ>-! zJP$g*VWRWPHzU!vjW%P@q>V!(IKfOrk9U@tj9zf6IonJ#=a}hchOzp>*`~nELB}`G z%s0$^p&wXitcCzx0UEAoG;6U)yEyc1EqS9Qm}0ZUl$fRFeDsaW%!TM3FG8Pp30k~M z(c)c(cF$@St}<7n@4FVwq@C}(0WHH#=4SPVI*i8Q7KMHgJ>YF&7Ei{??Oj; zkGU5;#C_&|^o0+ibNq*S*gRq$ML+R4y=xzu!6(gA=4tbcc^1vc^JoF1R4cS?Pbxq9 z$YivV@1v!&Ga9^S?W#a}*)O3}dy%&Us{>ViG+EQoV)dh!<*XEZ4bA0;Xy5*YCi4}v za!;Uhdm9~NB$~RmXd16YZ+Jag!yC~2orCu5a`m2h9WBF^Xg$Nto8~Q68op!RRaaR( zm|D%cwnpd;4>F&YgpRNclJySsrMIE`n}H568=c|3=0h}uckzx`V?IKg_z`dBkI|id zB09A7vTpHn^M(1+Z1}Tz9rUL=O*z`qU1qo0gGR0rZQOperr(JOYXzj)Ttg>woEmPHPnT~sn-)~td>C4t;oGjNSjTr{I3kSBfw z^29PSui9C7p6hF#<7@rFHNJ%}T{J&6Ej>MQaJ6&)keM@z>kp}MW6m5JIMa#WnXXS~ zIz9!?ESX<8yP&@HBsDWTH6t~x{?M8S9nUj*1cpm00>dR{u5FHgI6(&Tom=^yTbWtD zyhU>ZBPGa&Bj+t$GyvFNqIKbOM@K6Hvs& z+4BpE3zp1Z5|}u*c*gk!_0Oug88}O<0+Z#j$jR0D9+=_e@eDVx8EzezQQ!nHtHzJW zSv611miPx)kU<3s6v$O%LG}Ic0@u3&xA5oIc;gm+M&#UT=l*%N@aNUI3CtId&HQu2 z=es`5cME^Mq+dKq&*vf98_dK zn5nMW+jTdnhhyK{b=N!7bssXb+~-{PsUd&e=hAw*?xhK(W~TLa-RHXY+3xi~_d3Tl zL)Rg9d9M8+$4qy_OLxOdcjK2HN{8z{-Hm6u8_#q%KIv}!(%pEahvF6T-wi+AjZbrnbb;kfZlb^T9u!%KC;Ne%fIik}-^svG|vA=CB0r+eMg^}lzhJY4^JyY70s z@y&MgCEI;2+j%Z4ln>e25#wnhGv=4fx-iP+Y?E0~FzZ4mXLH;Ta@-Jd+zT-QF=4Kde^X|C%}uIo>(8^c^T<$13AJlB1m>pstQpXa(8$qdCajKsSeS-4Ju#5Odv_=DPmmy8h(4G0b)S$#dQ3x$g5^_j#`SJlEYI*Z)DT z`$4YzL5{nuG$(#pX^#I{PKz6qlP)=oDgF$~ach2-6JM?!f3uvH$+eTNEGHdYJK<$H 
zEtYGigtMIVaV_x}l;hTrET=_d@05I&oBk}Pg=6owNGCtIcH^HbHHmAdMrGwndboDe z^*(R}eBLj1F9 zQ|nnDg|U8GCercMvJv91!&#rjo%LJXIlc?)_%9A61mPqMyQ)>f5XOdK88-~eI$=1R z4MW`7FvOh`hOkZ;;!wh{oP7hOr#TWDGc$tJh&#TsuzBO~OX||Q_3$rwWm|hQeAnGn&Y&i)Ew78=iQ7u z*T0^we?49Qdb8$#bKB;Y z_qM%`U0hH+zi74{<}s9*onqG^v&uUD;q*2AB-w7AGAiX1nqb6lUCEKE<$mL`#&>U1)=c0#1g z<+*gX`;?lKRbM*Lvx*kZoONMi4__1gEVoCJmE+2T99Isw<&@==Q)-qI8B7TT({1lL zZaa5-JXvlzXSqF}EcgA8mE+1M_x+GH(0MMy?b~If_Kc9exQ%RtbjK|xS)7rYBZHlc z)PX583QJr&rygPFUOVy0NX?Frp@8RUM`C59xlg#g*9^Beo8k6iGu*yPhTA8}$mrF; zj^(ZhR@EfA6VvK9%l7%86mR z6J5!b^qhe$Y<)_}8IwJ5Sl+l4`z7o-Y3W(#q^GAQSb33}lX=pcaTg*iPxh2T{fo{h zpLJi(w+Wf@Z9+Tu+XTPWrn5@Yhhe`!sJUi6e64G{*4D4W78J0$KUZikp{WaJ6faO6 z7cN-10L>B$Z_pe$Uo_a&`u18$9IVfFb#@r0!&@IC?;Ur~OQ$DfUfk|b+jMgRjphVd zeQm#CwQIq(PGa?$R_~!7i$yziJG!Eq(0yIbc`>@_#jK*6jpl3;`sU$iy9Y5N(FZMi z3eRlyje&0QErhQ2T?bv~y9Bz=w-S0nXqnI*LNE0dVZYlq8@j=~2i;`s`sUTlk88Eo z?1JtVdQ9l6LRau5uU6lft)$dh9c{k@wyTWocfj@SY)l(gi?(5&qRj!oD%3XH!Dbg5 zF+T%qfwg}RtO2)z8^G0IC14$_LSIu9tB&xQvQ+53@z5>0wS%>Vw~ zv#kNnn)iK_lV9RyYley5sN!N0RMVK9F)8N#wGCoK9K^$Z4@J;h9BX zp7JG41rq>saY;i#E?~YZsTW8GU0rm*Oad)IG|%!)0ngi-@jMz>TQfdW(dbaWLp$tO zZayR>wn|lj#AVDlKh!EwsaA2^QLUP`YT)4m9^T${-6S0I@P|D-GlA7^_Imi89)7D* z3CteaBjF1VzYd<*ioKVA%VYoQDfkyW_D{h-3LXG=dw6Cq6K+ai^>M=G3DmM`p4wE+ zQ|nI3pX;%o?ct|+_(>j~utG<|80?2TR|$jQ2e^D651;Abt(z3rO-GM?GJHb9wF$A9 zUcPaSeQW$qh?F$?fycimcPMu(kI(h+{c8Axsv7=O{wM6LaTDK*_|TH=5#JSbaQP(o zmJVOjHnFq$8ST1h(f6&A4I|XmuS2*r1ZTYSH>?ZgPU_JQA z!(01zT>IBrzU1V8%V%0X?%^Nu@b`N7)gFG8hriClU+Ljjc=%-=zL@X}-FVJ{pYHOL zJ^VNipYP#^)bR1t=axCnv++sfVN2VNuvW35fkNf=;{1K1+p&H)$7VEr=Hb_R`1j!7aNSt`WtV@>WB)|l!*Tam9C%5;Y(b8k%#x%&#PlU({(@9V?QBoRNPRmbL0B4r$P;%ehR*;$G(Gy zPxA0BJ^ZQskM_7}MELdGxEb=FdTx)nQ}E4CaF0B+M+@qCh;ROjhqrFXLwhto2==*r zg@^z46ufo6$#L(EL-P&TuXpTQuWSBM^LMy@O{qWdFFE$jpK1Ph!cEqF^M{(>>)}_Q zf?wsazs|#7>ETy+_+=ixxQ1_moM~R@#IyMv_~|Y`*~5?X@cABoh=K&Z@bIY~zB7C~kO<;Dd{YnKpoXt`FSlx%u#SGw2X6cWj%g{wuINm{P;@RxHPD));$_UC)}+iUuhwbH)Wg?R~Zd?yKM3 zM?yUB?(H@H#2kq^6mtMycE@b@J{vco#!W2w7CK^2l|P=e*GiWs&bHrT58H=fKcedQ 
zdo4db?!9Flv$>|+TYP~Zu3WHj3zcaLPaTfg=!9iy%x5*_7_-)6|6UExPDwTIbWhr2 z-l%aC^RkD3&ci?9;UBIkms;;c>TvVvj=PxqkSD`q*2LUO-2MuG1MaSlSsAk&)617| zy(ng0%uGx#Z{19dnGiF|bsyq~lJ6z>lZ(BV@5gnonDm&gm>ob;Ov{*POs{=Qt{cVF zi=qC;c=+fOeDwcI%*yD4nER}3gs-T>f6MhI>^ESp2OmYh6a5L4$D<#r&D(Yz zeQ)&Y=vCyUhmXDv`zxbYL@&ei^2OARcKBb2J^38pA$ofB7CcC($$wl@2keDLtizG$|t*$2(uYW6Dk|1UiG(CjI~eH7EnKfv|f zPF+=+mE=}mSI!zsyX&CUORlgsdXZuG5WaQAhTS79PdUK7EBqCfs%>(&r`X&tPo;?u zw>Z4Gxtdjvnw9F-=YZG`VKpjyCkh`c&-PWnV$(|asp4{~@b-HRt@~J)>Q2k^O|<2& z685fyn0_;`h}-lu=z#!@5H`~xVLi&?DwecPBXpa?uV@A z*1Ci6sp2w6c)P2IR?iA=#9s1BU1n`qjc#K*Nceuj_p|PGKXKnn+-C{@rufrJY`ztn zC&cDS%Lk8&{cB=>wb)-{dA{bh{`VF8C&f)Cu^%CBZV{X2a`zW&qZ4kIu5QJWyMLQ>{yNLfmJG z4_U%5v6QdUY#E&;J~*}Xn9QL3N8%PPelGFZU1J`wym~<5`I~j4>xn;ug+_|YdUAKC zwNX38&kUhua<@$QjuOL;61P#}@}T(8QR3E7__>yv7+bse!o=p1?XhXrZZgDuhD|&BcG)<5AT}S$-FV6Qc&TIY z5{Ghe6EC613qM)t3BHa&!ppPq@@#iIFR6>9UG(N#8?AfG-KJtQ+&3GW;lke_beP3&fuSmuD7LKO+1K%(`m6 zV6g4FKxiAO3EkyxcZqFx38lN-%@jUM+~kOx%Y_aU`y6pIMDF$#`-_E6kh{YjDsF}f zKUDZ{g#SkPo5g(_OZi5`wuLD+4t%d+V>m<7I8pqcAub;jS|C2m5I1)Se~0j=S(oYy zag!`*$&$NQn+LI9C2o2-u@!!pxV&B{{S-nRX6=!d&K$IzkDg-hVq@o;SKC~LIyQE0 z`fK4gSy{@?E!KyH;@d(AeW7C`Va=3Uah8NWQ{2q7v)g=SWuKZUZn_HJRrsDV8~&TP z>?zOolrrt<+Sq58S)Q*rZRpX?Q{n^rJ#qJJaXDRLGF|LN+Utqp+eEp0r_hOVccOjD zB#O;8vDqg6Op#KUA~kJ_)U+gVGezPf`J<;uTJEv-e4R;r`1aPOYYINtz2EF!*NSxp zjWO+B*HO%E$HLjYuA4DSoS{zR8`S>HFWbGYdn>!wb$`B-naPejcCYKRl-=uk7CY{& zVjY3q>-tLado^};uj_01>gHX(im`iLKQDV-zbboOe38{L+#?%MN}pX_x_kB+^r`SOsxuKDhey{>hA**hl6?scu3*uAcKayVym zyVo`Q(~MzXnHF}hYrYC(uWNRjVXtfaSiyOQwqLpRldszb$k%N-eARX%eJb|7W``K| zy~bC&+sshDW_wWfhIx$LVA$~*f7$U`k42K*$X*Sp`_$BWJ-`LMQB8a2>qf`EBhMs*1QkY~)=$3q zbtLdplD~r^;1DHB#qPAA#D_Jz+BYJ$IEZx6c%zfA=rN~m3lqva0!x8u^V_k`V*+^!wi z&KGZvD;twVvX&;DuMr>Te1LNe=PEg`b@`RlmSvpwjx|?_+f~Bbd-m>P&Utz|r+s#f z*gE%CvQy9`y-eLpsNsCk_#RRFh%fCm-!E=a_4!tD4{@y22UQZ^BOc=`>J4UtO5=;^ zb-|s%o%&OuUkcqIbYrk8Sf#%b`gL#{`<#3ubd%gI6S_ILk9|(I2;Cao&z=k0gjUE? 
zyM*ozRt76|rSSWN?icq5g2#i$_4h)nE-{Y;8ENrcMX*9l(jeyE;9hdFo-*OT z^KLSMt}2Y*G4u^ALp|Mua{nJ4K^r^I5wxM#If6%nM?+S@->7jM!C%-l)IEa7g2zI< zHG?)q?qQz|J@2&--M6MU4#EF$dY`ws=DG>K9(-MRaw@o$lM)Vo#z~F_e+vF&?|F}) z&A~cHt*}CAITW-KLk?T!A>NwaxYV+@rDyNE`4F_F;yFTj5_H{pZdE^H?Sk8b+iTq; zy}`qrZrnomY~2d+UbAj|z4yF)U3bCXIYa5kw0T*}y^Y5SPH#C|ceXa3sn#d@!z%@n;&((3gv6?$Ka3R2ld$29pS$YejP05B=3WpIc@!@6TiCt z$#b51TsQtwCZ2W>dcJPCgzCEZ_g&DAVXF^GOP%&+Z-&xR?a;&2@{ckT_pa%2rfeGJ zUZ`IF5d5Lm{T;y_a(`2BlbCM1^12D})p=Sc-$P{|+!EX(ez*_Xs zinb|k>Xe6-Kf#ZKAIrUOg5LzGBQ~zV4V*R}b@EHxdE}V5amy~0m)4E<2(@LKZ}x3e z?ar_^@FPtRh&S&pc8Fs?0QRCepns4z?@{(K8EW#`-Dskj$bL3c*byn+eoN1OE`_F; zot;Wd3H!P5T|9d>8lGWS0^aECOMqYOOhAu}y$R&KzJa~PN;U5@cJup+o*w%Xs77{& z0^G1i0dIYFDWC`Tt=^8Cow^*#J_WekLvKJ!%zVV`d)3hHRe;Nb^ba)sgJ0p&>`ky3-p05I`wM}Uv&tN z3y%zs4E(}5CeV+w2WJYWwQ0%8b=dGQ<8SOsWbfDNlOERX>}9}7?ruh>0*9|1=$(aj znfP3Gmmn9a_m;5xN`&^3*e|;<*6bZ|%w*e?lEU$_6NAn74E^fTKB(Vq+bQtoaLS}J!p2EPh^Mc>cTudU>y?`P>I zaZ@IAvu#cE{Vd&T-{AE9Ed5q|_)h3{p*tj$ox+z3tq`Ag3EeGE?GgJ*vDqhdzl3tY z%3OMhmR5<&AA}wh_lJc4N$6prKMVau=&wSLh|8lwkI7rWr=32j(+_p}l5ymq&5@6V zuC>ya9JF-3m9pfTrKLhQ2Iup@r)*2gbP*9O-G*Ws6? 
z>+Ktu{IaxE=te7#$uCPcTiHu~S^BNq{Z8n1p*w|^3#}0MyM*o$n@ZvL2|Xw_M}!`= z)MVQ_LQ0+vK21t29d2{Rj1ZbHHsclFO_+)HJw!U*3ch8g(910~XA7Msv`}0w6uLzG zTxzMZDYhl?vCy?r?>`Z`-o9H%v8AO#Hwyhq=w`|1EkeH)8+jM$?ZWRAS}wFg=q{mq zgjNdOC-k7uBSMc_N{VUuqp}1Tq zbcyZ5vD>BX$?+buCMmXbt$m|Y#!y10h7$6(!EXuK(tM$zT0_5sT4U`OTB?$%pY&^y zLzaFibc4{1QWw7xx=C!xgl@L|4eE)dTWvoY>0)VxxZEXlx7bt)zfb6XdA3UIe-L^| z=ubiq3;kK>FG7E{l=^_&B42DvI9up6p+&ZDZq5_B#I|tq#rDR0q~E$7&%4c#tBj7M zT{vUXNw&U;8(Sa8qb2a+#`Z?yk<~U0A6XyCRZG{3_oJhc?7$6d%k*`^DK0( zo7gLSGdrZsL+84eouYq6Ls!C1VT%34!gLt>g0WkhTB+NjZN1#?siv+#PnyO&Rff(| zSEFa`#g1ZU=rh!H^4GxY(V;F;H(DJkd&8mMQn#Q(y-MB6u3$H)JJ=U&m0FGV^fq;u z)t0HdMVEDtoo`2XVSo?#K^O>ta8M6KfJjh3__AsM8j=T%knK^RF=zssf@UC^dodsu zu$w9R>eEyU&=SOh1keg3g4Q4jv;j6>+wy!n&>nOEr-6>xcLJS37tj@S11X?8NLB68 z$DO9qK?WgYVrCIq59)tU&texN@X0J1?27zlDf9vB1$gCWG>OfVD- z1H-`xkPk+JQD8I}1IB`JJTo3lfS-st37iEcgDH%Ar-HM=G;j`>4lWC>Q9oe_b+z$dKp>v3i_m1!N0*9 z!OiM@@B#P`(7#diZq!=v30Ma{#jo|?Kj1U)Irsv62{wRIun~L(z6Rg$Od0W~kEFJM ztzaAYHu#154r~WIz)nyOcJtQV1NH*6DQX{D&i&v3_#RY&AHYHIBRB+p0*ArRjQM{7 zzk(y+C^!a=gA?e#egnUQU~s!eBc?UGl^fs#eh^0gJ^;c&JrDtsa7SI%)M4gnx9gsm z^Xc!M3l@O$g6nlLSOU)H`U0>FTnLtfi-Pa#i!m<&D{ylu=1OoG{1w3+`buyu*M9@o zf$PBy;6`vW&#uCxPUtn{nbp#MDD*|4&k0>G^c$h}KQ|lbIg1`xs2#y>pnh1&oHEqW z?*1bDQ$n{2eO0KP1w*GSlrb~>TA}-d?iK3jlfMw2e^KG)5VKooiWB{}EOu z_I&%MYH?lj0kl#(ulg4bTKGwiQ`w)?@hL35+G8(&3g>@f?dYbx4(K0x)*J@k46?%R z&kx?}wlDFQk5lEqsq9bgv8z@8&CzCBH?B=BEndyD)_0`ZDGo;$ZPV&aNln^4`h4r= zsG~cl*XQc_Y2USukbN!7KU{=T?8Gg2pW}f3i~>JJZ9TQFkSA7sX^W?rHC zmM}FsdfXhLcUY|xX2e#fyu5Df_)<&DUHjhu^r|K`wkKFO4vuf3JaMncU&g3UyH`i( z`#xN^l>QitP|p1Ej_Z$`ddcGh!6%&3l6-&I<`U(4iq>nNr?@1HpHKFQH{%9un=KhO3HkdX)JlRa=+QMb2I zH(fTawk_H=K|hPy#k=91;474!Ew@dI)|Q#EyscoT*_q@EmdYxCi1iCX5|4N+$7fhvQUk-(&>9- zqSG$y+MdVcO^xpJ;_(Sj=_wFIyY46E%Kij(L z=Ep8{iquSMzq}nBw_I0C!jR3WOmB)$>E>8Etc*|LdTQhjKvr@4Q`|vbJv_ zr{<&2hWE%E`Ww8Huya?;*xB$4=?_Byy7l%dhPJ@^4{WjE!)51rnvZO`ue`>5oM(=)K|`^r#{ zN8ajRJ3TwS&EfKsfYWTWRq~?z0Y<&LA_qz9LKh3kQ^sQ^}TuUlF 
zQcJF-joG<2PFh6G;k?0XuJQfV(3Ks1)IFrPEV2|$lO3hhXMSvRs8(Y0gYBvO_rqK7 zYOmY*@6W2vHE%1a)u+odSwbV7H>b=^_Cya;-`bL9G2v=jV?DX^E4p8W-rJ8(x4K=} zJAZc6t=A`WOAB&KfnKehVczdZ|Bx-ASy|WwQ+iR=euR;q_6~<#Wg6n)>WsZypN#AF zYsWk}4M`<@IYs#}JLfrgC%SQPEYZ0Ag~MC7YRc#@Ja`HxBHxLJl^cw~+&Iwp{goce zPWzC^0f{NS$vsXQys^NA$i-6xV14+n^552U@8PLK)z;3Kjxs+UN_X&WdsD{xZi}l~ zr-?ly3ArzAnUO>2x=#PYbw$~bYG$jQwgUf#JZ3I1nCO&K|(u|K-4(+l{nc0D=OQ?>o^rm=3{uC~jQT%Bge zH@t$5C7oh4@Z0t zVZVfEY3J|w=hFGi-LG;?Os<|yVph5?sb*5d#A~o!;Z3o?=)F? z*iqK;btb3n_gl6{U~5vC)FJ!-^5&Fx95pLUYEm<)Lw@`JE@o&?R|6txA(xWvEk6P?MgdCTZ%?8`SeR`J+%nnQ!k-Ykdd)JWUfAqZ=eFKvk2GWth%hH?V5`S z>RCgP0iC3tB~#B@Q9tY-)w)pILiMZ-^{gHCwuae1aCe38rn~WfwiKNL-(7cyPt~dL zcK*L5Ut8H)XjcKWlvMz2JoT>)@}wVkktqC^(AL5Z)WT(|p8cyB|7N?0T3AnNVS8%f zrSL1Mi>Xoz`yg|!g1<&zgZ;I9E7hC-iv11#I(;4d_4;~vyP6=;evt)l*AyhmngXYG zwns+Y#hO>vTk>~PyZ$vrYGsh&C%gd$rD#!jz%aXIzRsBU2)~m2?}^_2I5?U5<2_)@69-m%ALX_MIQ@ zyuzON_(G?LEtMyn=Y)1T*K?+Zc!|+~l%bs>J63dj3wmY8p+L^l9_WzMp<{=RZC7;Y zSRE7M)OIT9V{O{3YBMKvw$5!G8#)sYCvE_TNkb$t0g3B5-}0PK!9T?LR^sh0mSZlL z)82b4(b9RG`EvG3v}XrShij17tJR@aTR9KGedgjN%$Mw$*nqQFt7)xrTlH(D5_Ts% zp0FZec7ln|j8BedSIm~b#3#308@D0uso3SQ1Dd_p>~VWWUML zPA}jOb(irU#9qkY2;LlV?>>1pT!LRs}`yhKNw9APuAwLI(0Z6F!T4?2Xw6oDTYe zGeAGk9}EE5AO{QtxgZY=0)xRgFdj?U=FgT>L40{?^#pzy_yMT(TAUnSpd#s{CWXchW_J1upC?jE(Vt{ zj#~jP1uMb-CbR>D_7kBUAhZMjqtMzSec8(zIQysD9nbybv7a}+ADQeYPyD=D{p5v@ zx4DnBeL-m75!x4o_64DRtBM(qF99WBDL5ZofKSVqg}D$c2N!{h!Oz6hu4X)e=JGf2 zI|#Ch+-frEQ7|T*1!jW+a2~h--&j$DPn+>+Gd^v`r_JQtF>>x0Id_blJ4RUgK-=GpIfcB8Cv`k67#aYjxW+ry;dJlx*FdYI$b+h@L^@2I(NfDiaV7zluHP!B`^ zt7ECp_^$zINI$9(T9_!%7&HM*K{F7|y%-P+nu9pd0<;A2AOW-jiJ&z|0&PGtXv_2M zKzq;uoCZ2#-wAXET|igR4WxkXtY=Kcmo)fvkU_|qj1saidvo0foDTYeGeAGk9}EE5 zAO{QtxgZY=0)xR2!aWlV1;fB_FaqR*kzf=U4aR`6U>whk2NU2YVom~Qfyv-htL5e} zvYiX&f%)KEumBW-g89rJ2mzWMv$v9?*wbGy9?Y6?g96Lzk~b0{on!cAb5zn$lo*Rnfh2w*AiI~i>&B~ ztcXHZ#8UsdGTQO6imr-PbXBaPt6~*h6|3l~SVdRGD!M9G(N(dEu8LK3+gL@nja78p zSVgyuRdm}}MYoMrblX@(w~bYF+gL@nja78pSVgyuRdiMKYJIGv`<9h-Rjj0|VkKP_ 
zE9t6ONms#2x(Zg(Rj`t-f|Ya?tfaG}Cm*Zn_OY67AFJv1v6^lltLb*Knr2|W3 zZYQhhcCwmoC#&gpvYKuutLb*Knr2}gm83EIVeg#(haugf`$B}d=@SkrqX}uM+ z-U?c81*_=}vYPH7tLYB1n(iR0=_*-GSIKI+N>=53Jg&V%1(1tM<0BYOjh_dsVF3+s3NB6Rg@h!K%F~ zR_#@>YOjh_dsVF3t76q&6|44sVAWm~tM;}r((|)&uZmHg|9>Q54k2LIMkTCY2u2D#sC}cwv5+(`> z6NQ9{Lc;7p!t6o9>_NipLBi~z9!DWzqL46ANSG+whk2NU2YVom~QfyrPBEnzA+8%zV|fa&0WCSfX&FcnCc3M5Pg z5~czPQ-Or3K*CfYVJeU?6-by0BuoVorVI&FhJ-0Y!jvIl%8)Q+NSMt?n5{^d%}AKd zNSHDtOc@fU3<*<)gegP9lp$ftkT7LPm@*_x84_kM62^~&@t;D%bVI`Ukua4=m`Wr} zB@(6*2~&xLsYJq5B4H|#FqKG{G9*kH5~d6ZQ-*{oL&B6HVakv&Wk{GZBup6+rVI&F zhJ>j^!uXLen~^Y;NSI0_OeGSg5(%>d39|zUvjYjU0|~PO39|zU<43|&B4H|#FpR9h zJ|x6`Z~%M{s=yE6Aovj+0zZMn;Ag@*xrF)g6cXk~B+QRUm>-caKO$lFB4PF-VfG?n z_99{S{x2lVuO11r$0K3({E3A5)s--N>PVPMBupg|rVaiNSI0_ zOeGSg5()Do5~dOfQ;CErL&8)dVJeX@WgZDriG-;{!c-z*Dv>aiNSI0_%#TQzN+e7f z5+)1@Q;CEL(=F)39H0+#fIiFt`Y;FR!yKRwbAUd~0s1fp=))YK55t@g{g?*8>BE#G zVbHjNCZH*32L5k-m~tdcITEHE2~&=QDgQ5hm<~voWKSQ)N|+cVOx*vm4^xhWDM!MT zBVo#sFy%;?awJSS5{Cc4gVA6N7z=9pFy$xd!|X@G>_@`vN5bq!!t6)F?EfE1n5{^d ztw@-yNSLiin5{^dtw@-yNSLiin5{^dtw@-yNSLiim{KH6DH5g>2~&!MDMiARB4NHl z!fZmqe1(Mh3JFt+gegVBlp2~&!MDMiBUs+KTMG6sE?cSX%t z0ik|ONXnEWWy+B<P6e&}Rlqp5Z zlpx;Wy+B<aOmpyShaeyXY1dL_tAOAb=o&5D---curm9w zGFw7cW}jFYHM)9_anwhl(bY*bd>Czh7WWIhmF{;Eb+;R+k}+Eq7y`cP&K%j1GoSA* z01Lq)a4EP9EM|sq30Ml2f#vY!3UDR33S14Y0oQ^R;5u+UxB;wW{%;lS?nZDESPgCl zx3X68Hn0ZV4(f%ayxCgownRgTW+Ttmp6Ya5y_Si&wY@$6j(H@&$RKJ4~B9Oq)AQn>$RK zJ4~B9Oq)AQn>!36-l5HHqs?uj&26L2ZKKU?qrGiq+_05#!&b%(TNyWOrG4$8eeI!r z?V)|`p?&S4jaWNwWVg*qcHyi7H-fvsT7ZX!4=&tn(tgNp@Sm`v#iR$xQ~Snb&zS7F zlKoS%H|kk-_WhA-e~PPOwAPE!T5>2VVC@Zhn0|8u{pJSx%?h3`m`$_6g@bV6AY3>I7Y@ROgGg`?E*yjl2jRj& zxNs0I9E1x8;X)%^XoL%maG?<{G{S{OxNs1ucfo~&aN!_aXoL%maG?<{G{S{OxX=g} z8Yx*LTxf&~jd72m^WT8S!4u%O;7Q^(Pk}!Y!+M_h&0l!^8}XbMz>9!=fOwLv*!Wg# zd@DA-6&v4*jc>)qw_@X4vGKyGMmW_7ryAi@Bb;i4Q;l${5l%J2sYW=}2&Wq1R3kRN z6&r8Bt%KP3R&0DLHog@b-wM~7;aW3XYldshaIG1xHDluq_|*o#+Td3k{Az<=ZSboN zezn1`Hu%*BzuMqe8}@V$_HhrkuoYX_iY;t~Cynr=5uP++3tO>;t=Pg=Y+)<5uoYXl 
z7hBkhEo_BHjo89gY+);WYGf?eiVbYV2DV}YTd{$y*uYk7;9hKCD?Dq&2Kw=|TCsr= z@%$!i{vG%`cp1C`UInj#*TEa$PV8X=zEuOhRRg|N1HM%QzEuMwum=322K=K2{G$f^ zqXu?4p28lU!XBQ&9yZ{&G~l;1;I}m3w>03lG~l;1;I}m3w>03lG~l;1;I}j|B5J@M zp28lU!XBQ&9-hJ;p28lU!XB>29&8SLRR*u!VwbTgc8hSSY( zx*1M4!|7%?eGE0?I z4F8(pUo-q`hJVfQuNnR|!@p+u*9`xf;a@ZSYle5nuw6T_T|2N{JFs0l;MP&NbrfzL zg9zMl{!m<{HslBbsYObNkWUel)iq&Fx2X`_bGHGz|Aa@?b{%B}e2R6|(Z~!N80XOgf0|e0Fc67KM9d1X5+j*wM zz)z#Y)9COtIy{XIPm|AP^4Uy2o5^Q0`D|7av~%S&S59-~G*?b@<+OTk*Sgtrj^~~| zk06olcJ=gQJpBkyKf=?G@bn`*{Rm?zjq5tF0XuL2CvX8b@BjlWMwTUDDOd(pqQ_fc zVRu?{>eJ4ApwHq=jk#xD)tB&o*_j>s74%>>uAJb?A+BuX_eH0vGa&Yc?)ij!s9Ekg z!aaw%=RaK8%C|q}%1(9VFjpEwSGI8FBd&ZJCnqxO=H4Tud_l@Fo;{0`Am;MTC7jE_ ztO2PSdz&Tz93~ADLW&%?cpg~xblzCl~1|Spi1#2SB`V#7$;pU0n2E! z@{EI_{0?yE=iG_Tcw#G`IGt0MQ%BtBZWk^1G%cfpZ+yl*hq>n%_Z-#l#`o%a$Bwh^ z@bZi!%F}*SojS6=J5B0qknnEm=MGIPLT}?esY9aXBCtiM6|bn3UMoni6{Ob+(rX3jwSx3oL3*tqy;hK3D@d;uq}K|BMfXBfT;J4tbkEmourduzU8IpD)3+);fn8-pq+LyD7HtCZ#x*U}NDDR6LXEUgBQ2~Ao6Oi|GGm*`tPrc^7q_Ry zJcnA>9jgwkE0+^kSV;w@=G4KUA z4o-lR%nh|rXJ3L-;529jZOpT`GXm}aL2xE6S>xw#G)^hkq=f%apn;#-C%f|zM9vYdB!c^?zolu zTIx{waG~Dx1T6m&mVXJ$za(P2ma*A7a4)zI+|Nku0mcwt#ez>fj*b{0oj`|vY#`g1=20$=Kvy#5M2MqT{|JPw`!zXiW%9VO># zz{%cl@>V!`4V-)sPW}c?R>H}f;A9b;yb?~9z{M0emjdTf;9LrvOM!DKa4rSTrNFrq z);HSY{sT4wJW+ln!XCGox_poP-sk-muoZBYBx@k;a4rR|rNFfmxHc57rNA8<+_AwO z8{DzM9UI&+;f@XN*x-&0?%3dt4SsmwhX;Om;D;T4$oiaL)7Nlr6@GsZzdwlIAH?qu z;@up@?>~;;e;mL6IDY?e{C?JXzQJ?1^| zIZt5)ewW}bh9=eME7 zv+jy>(JC}rMI5an?p$|u@>qANyTYa1PbphNr94V0n z)OIXtJL`EJSWO33(}C4=U^N|BO$Qd!!0P$2aBeJB4=hz*{bD@!DXd(b#`{@uHkXDR&&{FRoHN55&( zZ<_R*CjBP0g56$)t+)}~1XhEa!7bR3TWPbmfi>WEa0mD%Px>7QKlSgte;K?2UInj# z*TEaKLxa`8UdA?4;=GiGQAk`GrAehUsgx#_(xg)wFL!sAGnrqfS&U|ufTds=Sc$Bw zkm*Kn6Icyy2IrR3jhyZnIo-(VMou?!x{=e3oNnY4?YoiFkDMMfZ$nluviguUfUE&z z4Ipc8WaW&#F0#6mtcl2)h^&donux3^$QqBV@yHsFtntVikF2uhJQba$qSI7#nu<3hiReCK(i#?F6 z2a@$bvK~m5)U6h8MZ33wHQ;t|2ar7#BGvCH^B=&|;29uN{*jUhr=A0{`tZ--FW|56 z`aIGjb+SGeCM=2Tg|xlUY%eq`(gvuD7#47s!CfBi@^F`jyPR>|u{XKf^4x;9yJxS( 
zird8gtBR2ueXV4y@R=G#i+wv9vTsea zQn9k{&`M?8Fa1Ijt#lVpUP;U^>g`HoptH9t?6--fj{7}h%V*+pkaHk14ulDIWxYAI z5^Z@qX-h2bzp(L#k)|zValb&KBS_R3ie-pITgfjHZ%}sArJg$tTQp16f`jKes0BN< z;Dy`WSw%T-<^RW`^+3YY^nAzZgHO>1pP~;wMIU^MKKK-UaFDS`P_qF$@G>5mh~=9E z>Xij-r{9&*v{0Hx^xLF*F;?c$@1CYqSXr1XbI3kw+J^>xs+Or?eFo8$waiVSXZA?n zg3UZhzs$bSP%qpR>V@S*)(=D4l9-V6Yz-Imt<7hBOKRsoD z%fS#|Bu=FwP2~3pE%U+`6-QF_D&^Zq?WKnDJVAcP$WO}G9P%=@@U2*WTIKt1 z-;%z!^S7Mv^(6UxDa_NTN1e69`dy5O(itf~#@OuF-0=kU5l4NvRm-#1G25Acd5x&T zQ*rwkHGf9iIEmD4Xa1$N5wmVbOC50d%$fB%XSV3tnb&k1HE+k~7=;YeV8Cr~dJWOC zJI{PVj-M#o@vAtKIo|+?2MHh%B!L`|3-UldC;+h$+wpv!049P-U^2KE)PpHtDwqbQ zgBf5pIQI(D4q{6k#FjdUEp-rE>L9k%L2Rjm*ir|vr4C|C9mJM87}0*!8QcB%I{Wc; z_T%g9$Jg19ud|=g?B{6ub4IkEGot+*-)BF*&whNL{rEon@qPB=`|QW}*^lqDAKzy` zzR!OA=R=HW_cEg0%ZPR_Big-;X!ojC^at#~)8HBKEche+%AfEgp5wjD-u)T;1)Kgi z+Vl(HMeq*K{14a&Hi37+X7C<(A8Y|z!3W?&@DZ}gIU%y+b2n%Jd%#{GC(`Wa`T@WR zwD|R(f5jQ!4;TaNWDKyAF~Cm706Q52?8NqL!S-yy_H4oSY{B+y(Hxw*?F2611|Gov zVVKwq3s1wmHdxor$Wfh3&B$?!ejQ`H8^9`638z1RQQ*Or>EbbYKH^-~dkG0&d^|2Jo_z*#}H?69Dlb0VIMXkOOi- z9>@m;pb{^!3Jd|``8)wk1e3sIa51O{Q@~U(4NM0!z-%y|xrhZ|Ay@=11($)f#M0J* zd%=C+e((VJ8u&VR5PSnX1Re(8Vg&SU@E!17@ICMd_&()&lzp>5;Pr>#N2L82`~>_I z{2%xk_&N9m_$Bxi_%-WMAH!<=20RX)0KWxK(i=Vneh(l2fMt1_SIehKg2O4)NLt); zq&<(7{WGs>7Mf_$3*betp6|ZI_ud2>z+2#L@DIQl^vpu14@Tz?0yzXR9bf$Q(U z^>^U7lsJ%e;7aoC;2VRsUT-ANpFCvn)F#9?<5huujWb|-Pzoy1{xVp)&S4w`8P z&9sAN+Cekzpcy8zlbGjsfFL--$|6o@pda2xKfF(~0XuN87s3f#zzsa0jXtIQ%n7XY z7qEF7R{9u>{s=35l(upLW=kCLHTv$?bWVJi^WN#hUsG*p&Y2fzM;{S^`G|HTXOh20 z|NR>6Xt&OsA8lzjXL_H&!tbMXd`7!EM7ugfRAepZC9DJYg8RVz-~sS8@OAJY_y%|g zJPf{xr|~W3m%k0Z1HKEs2Oa_6r}aO2=2=*`1=ekWb?af>dRVs})~$zi>tWq`ShpV5 zt%r5%VcmLIw+q(20_$FZb+5p>S76;M%)B3mc^xqCIIKGf<77>K4;Uw2bO5&bVVbP+ zIK>S8aaeX7mTh9D{x~!B$C;@=&P@GrX6lbKQ-2)xZGwH9VBaR#w+Z%bf_tWw|*tZ_`t%rTdjTPPu`!>VA&9HAX?Ar|cHp9Nnux~T$+YI|Q z!@kY1Z!_%M3j4OgzOArtE9~0}``&?l@4~)!Vc)y3?_JpUE^ONc+jhaWU9fEzY}*Cf zPQ$hqnD#GNwi$MP4yz8rDs`4SYItrX!PS#oJ)!*BT6%^<^bADFfF00N(K8&PXCUed 
z=&9%#4l&EIg;|a*%yMjDmSYP~m09$Y%vzmf*6Jj)RwtRYI*B~*F!QmAnU77(d~9Oo zV-qtUo0$1H%*@AOW0BAL6~O-tWJOW#G$*-Vt|2)$=Bk+LI1%Dx~{_5~$o zj};}}N{P2pV&aFu0i3`E+`t12;KhUVfl63i1%`l?NWKc6>PB!ASPgClx8T#<%C+0T z8gM(f1F-U@(~hmRXlNa{7u*N#2M>U+fvABIP`7s9q?W7J@5$lKF@uW zn*RZ>KLkG_?Z@CJ;HTjKz|X+X!7sos!LPt$$od=bICuj57RZR^9j^TcYy_LYyI?bT z54;byfUV#I@FDn!=wgsoa|Rx1MEzt|ZQ_}isiBvtp_i$he^NWAX*nlpIVWj3C#k9M znCdV!^)fZ}GA&5rmmO-3?hso13{JG`I!~NM%%{tYGj|?mP4!|f=>()~YPd$umzeZg~u5{g(FDO6&hSW%+`fWfZW3@*JQq`8T7w z1+eXRNb(BuAA@NpVA>b3>j{|k1k8E?X1x&ZLy+)AMn)?cn{>|Uo8<2lOIM)MC*|MAg$+Jp4N`shmrdfGRxfBY2^4E+weJYj$6>`qttF2k!5S1{xp)G z#vVM%mA?}CnHyKcct448c_QO-lhJERr>hxU4M$jdaMjuAY9Uwi_--!W&Ecx2(^YwL zAy*5yny;>w)9${Y-4SyKjI?NXU(oKppxrTt1L*f@cV8&U*COLOa4)zI+z%cAUjttU z4}x!ihrq*NC)fpcg9fk%>;;X0-v^;5>7Xa+peN~|C+VOk>7Xa+peN~|C+VOk>43k- z;qP(ydmR2AguCkZDBx~etOnukakzW@tn!+aSK@q@CM6;xJ2?-8Eb|ebaRB~=W71y| zmD5-er2`wV0|#&d7jOd)Fo2g81wLRBfe3(jkN^@v6379$AP?k&0#HfaSAij5Jf9~3 zW*G3w+wjWU@XFip%G>bD+wjWU@XFip%G>bD+wjWU@XFip%G>bD+wjWU@XFip%G)Sa z8>I^3D;=PQ4p2h}sG$SY&;e@b05x=g8ahA?9iWB|P(uf(p#xa57A#o{maGL!)`BH# z!IHIL$y#8ftd(oQlC@yTTCij-Sh5x@Sqqk|1xwa~C2PTwwP49wuw*UxNhh#oXU~;B ziF{81v1)%{H2E}m20RP?h>!Fq@Eq@-htGfJ^)Gz>D-8G>HTnX05v=F?FY)|0!3OXa zcpLl!{1f~O{2TBKTUgB&tY!;VvjwZ!g4JxnYPMiCTdk#Iv2*L=7$Izy|EV0i3`E+`t12;KkiQidQOSsxdP3@##`UkzmhLC;#fIjIk z_FZOaSP8{gXx5nxo#3}({H7mQ^GF=dyqk*WnVH5lW5u7jKf;! 
z@Bhs>>>x6o(%(N*z)arz^sMrivupTEw1K}btG}nz-!}D^ednYfL_U5gL0d&{rmI=y zI4zqQUpGAn|L}$7@6S2S`j_%9{0|R4j`i&^|In#A5iXhpm)MmnpZWf~oS(^O^@-Ej zY8l$UIV{~ZuMipuC z`VaIU>OVqvaxQ@b?tKRL4&ZAZC*?iioVteRz_ndm+sU;r;AJb<|HXA|0ap%krI9ON zk}AK`rDFjhE}osnxCM^G-IahftN3rHRmxsc_L8!TlwB(2G%2Se_c;0f`+R>VBe9dA zvEON0oqXFGn>Eo6kC1XC&Iq-fy~uHr@9jZC@sxMba+;9xguWO%xs(xttfG_=o2=nv zry}1wi2U-sFZtfi(Dy#(dtwFTd#!x$p!!}b-;>psZR&gbLmD~3_atKS1^I0awbWyw z@8KD8*KVG^3+)`^uG8q|9i=xjB=Z|wY2_I$Ty5nU;uY`bIbU+u4lGXr+It<%ypCpG zMOVy({x>_JYT%@*8Hv10%^c&YcshJj?BOfqcapqLk-CSxBqI6&zd9CF+LJ4%vAb{6 z%e+nR@-}{XD?QA=kmh)oF-FuTUdw$ez@wDphtd~xvD{zsZCTMO+V5n!;}Vf(7kTdG z8*h^5o8c{b{^RlYBiX9GQ313g;<_Wn)sc~k!y*S}9piIWz{Z=e1n^7K%)gOq7E z(#o$7ZX>@>$Qg8_#E@F5f=Nol@d9=6@yPJcE0r=VF$Dk~CA2W}a~p*-r6{FDc1M zN^*#j;FWjDQRb8yxqCl%3vVMiVnS_rlMm@v*h-1soA_68Y1tJCE@zEIYrV zyeg@(vaGB$H7zMMCE1yilbv5skmYhZbCSwzIc~px%$5*X5s0@R(|qnZre7Zz+%{16 zo7eAM<@4Ab`u#>AV07H>uzP%d=bAN6zZR!e5OT0`vfZQSW6L<%QvZ-S@@V>jo$mgh-m8c$df;fyNjGD^O;;#<}y~D#5uC_DRgC3 zez=$vxEm!nQ<76fD=DHAt<0vc8dR5^o8ffoIac}SbWu%4MpkyF-|P2#gUk1>`lMh? 
zNnwRY_t+PrlCFv>OiwTHdvgp^`y^IZ--t*~^g2c-DU91!9mYH=v1B!p^l zxTA`C%nC`ImD0^S3R;n4+N`=w$m!|!>F&fn{f5u0btL$7zt=Rq!8f%6_4<)ER=rL( z%;Id2ZyKxf2lVjTTs}xJ77i#m z6$iW)2eh%g+W5V@IQpiS@ZI|2ajFs|lq>0lgHBhfddVp$E3K?3&(F!J2)|iwn5Lm+ zPg_2?cR^A?ZROSVfBS9akf9a7{aFS7er6i_Ltb<6?4kKXbB)BA)wPQ%uCD1t&@GdRnT*{QUf!Y^TeamYPbAqEIN{GHu#|g=@#y zIVp1N+67~-o!n2;{GLm_rY*j{dh}Eq|E7+vo)WN`-b+1xP3t%LT4U60vx*iK&s;Mq zcf`!W9-r4=H)zuA>9c1~8a&E0e4fEGN93X(C6Ce%?NYUoS5&=sPIu}btq|X{;!KW` zemc?*(5tE{m6W2OY^SrJAiuJzYJh&hWy*0;Jm`|_)aLXqPVyvp%;e%k`x}x%{lzB{wGsTrtJopnWv4=b0cKMS6W}?gHPILu41C22&C;vBv6x-~B^76~i z(hIg=g|u%=E7#K+ENyLNcc8QeVbj-PQxYeOM7TuJQqxkCU8b&RB$b(8K=tM*3Y}PX z?`Uz%N3LD^z}g|>vN*L-)AagTcUTM>zGiO8nBs-E)!f+kqUwzP{nMw{lTUzr9#yuJ zbF#&DCW!68n3RerA%ga@cDWYa&TW|83#iH*;teo7&F?M6JS2KeMCo@?zmO<>1`g;mz5WH$DE0b9BdX+zKgo(eAF-}U zG;#rwcGk`LCB1jFu4%SWYn7mlls3d$I?Lq>ey9v&w`z;e=$qZwFW04aO3vYpSj)5Ys1<~}r%AJV1}J#Fcbh z;ihJ_C8xG%!P<*#(DI^t=S{e7LY}7ij4KSo?#`|rFlL(APqPxzWK z^0t}9OG;+kJhEWexD1ca?M^A`Gk(ULOJ+^%JEkJj^j3(-SIt~gCv~OdQ;dPHSZ2=N zQ8jC&OU+v8+gZzI-Jc=liKOpjm38zv!Vv5Jt*poow+89KV_Jh)iG-|>CBdyzJyHcJ zVdc*E`n3Ynm?v`1GtA&exEo%p3WKb{lH`uns{Vb!6KGkF%cIS`NG@{HQe09NZO(R$C$TiH4_VY;;YM~FQ7FwwEx}6pFA$`Psq9l#A z;7SXMSc@bBM0r?*=(tIht{x68apV)*YR_fL?xp${Z6|PCDOaI`^8sQY< zpEx{Hu_M;qpJH^hxPE7H;trqH$!5HQ$;i#bqeB^@27hKYF08!! 
zv|y~buMW*{>MJBcG%j8%HI3?#;S^bx+Qc)pWcY+**R{AzYGe#D7z zk1f*V=6cON_N4eFZofU)q~$yO?kf_KY+pppTH`SXPO-}od_VXXr_19D*fdYDL+OZl zfDFkK9Sv0y?o_c(OBS)I^x3hpB!nBLBK$(S|6$UkBOJt-q5z96`e0S^8lDRazYK9m z=c(i-sV@C3X30O3yOAO`eJ6WSV(y18*8TJJF?>&*c&`sdx)Mve3YRlvdaJ6!4GXP> zTpE`v(qUg`nEhN1x6kmEn+DQ<*KL;%Kh=f{flT9Ayv^`b`TX`Itz1&thp5k9$FP5y z#zkl>R7;T+>ZVyQpJsaf4?E)hMP|V9 zuw*V)GGGt>NW0P=h*-%#1y6eXevjr2P9b}%URINPXD>2M3l}?Nk_)s5JuYTEto8Mi z9R;d4eYRjjm>PataHRG(k)`8i{_5IagVQ6mw_4R+ovNn~Sw|MGkJxlh#j(<#x1{MM_RThfdr_cN zQB3g$FPYe&8em#s&jRf^DT}4mm~xIAF>f%K}FAGVPSGHn>eoQm;+gVD%a|Va)7$DnZX0S99CK;xQ$2*9oRP?@j%l9EAQoi)GMn z(&d91v570IDoY7^M4#QOs}Ci^t(Xj|I%ROF*Z;_r)gwo)p7Ka+7Rs$yR=Rl7#LEZK z;hj*K=!`gD<#g(|u?XlDCKnWlF$`-rG{SVc;BN{6guJt9tq05mN8Z5NOV(Z_L+J_Y zgxpZtF?yV)YkqgId8ANWD;y;hrvmKLMuUT9V$Hv>c3NQbWW&sJ*F7lIi0A*bY-WoU&se%t-E)!Z&qx5uJYdAmflC% zh)%vjIZ7U&<1En85S_7nVHr#-4<;3lf`U1nURjqlp?21_r5D{}R9zzB7>fV-QLWS!hU`7`ot%dUR+bRu$0m&IaO)rhf4dTDsAUmNN7p{xDW)ZM)=M4uRfbtvHn6luiP!7B+vRon(#)|_gZ0Wg zNJG+bw6mVd!bW31VH=wxLh$1#=8MubYC$oYFDP%INR2E^??lE&+tjoA@T`eEtACeg zl|_cRk*7w3N9y?wum2i({xvTuM>!~eU%XoKx?09ZT9)6N=QW>;=J=dRfA9$z@wCvjNSSH1(6HZt7nUkD5#ts)wPkbe8K*{v z^<&0d%<$CfAA2{Q^<~-*Rp)Wl^eI!Ohu>dzdkwj&rz=^Ot6V<}jhL-;>7%XmkJ&dZ zQg*$6JGC#JE_iqXHubuCwm!25CTY7ry?rrSGj;2d5ZKZ$8K5I({xk?A+Aswih zgP5Y9r3qK(xQzDn;JR$5WBKaN$|=9BEU*P^b!wQO3*7TKiK4*G3 zA`{Vr%oSLY?qCmhv;^WCSm~VqsY)PRdw^Qx70;l+CbJ(BtBiWp$u4oHwO;Xw_7Ab9 zbIpMMw?-Ou!bm%r$K&CS~?()Tpe)91=JUo4XYQF`x^3dZW zPnS26T}rZ*jq=(hzt;l^wp-`|=k~PCm}d(FD&tLk{tR2sxo)3!i=G%5&Gj2@u*b(I z`1Q3myXNsF2WzxEcS3?Y_=&b5+2_&hwvM~~3Gp7SKKPs`9^EKu6sJ~&ICVzprfd3k z)-H+8taRayb^jJ--lOTl5-a@!PGFPLCZfyxt#&ITH^`6_)n{6CENM$p9*7#E6u^j_ z0*_DEcE4})d6xT3$0Ogj2Yi=%eD?Qt>$=~4wa>Kw@@ICFfuzl8)5d6(PN&D`b8X-5 z^7%YYXYk*_m$*QZbI%^94>_bAOUvwQ*^dgFn$zl1t4^0qdVej$Fgb_R=e~r&=~GXV z^IVTl(+`Vj+N1qiR}XdtKh<(1Hx@4h7wy4Fp&gZr(k3x3<%K@4w9`2qZ+UU_ZqcRO z%}m|$XsT$^O8tNlf+n5{D;XjqEm5QfIU~^lG5=cF1dcEmSZND_e^VB3r`L2lg1^^f z$mL~397wclZ0v8qrAiFOx_c|@+7L6Y zFDuwP^hDwuL!yzDDr?roWyR=*+r&$Wj7}>|oe>TrF3T3AepEhl@Tvt5_AATn?{x>J 
zc)eQBvgy?$=9Kp^jcF9BWP~SYXx7r2>fZf~qOrx}W|+w-{RX7>@cWGT0mT`a<>Ly1 zZ;R0{NX$;TG-Je|lz2~JdTM5XGFsAohSY0S8TWCLgp|E=x>NsrMt7#8(Y7n^B| zRi5^7zhPN{9$vd0Jr##FnV#R%u(=!;Gu;-}rQvaB^y%H(C5mbc>+_Smx{}nq-fqq1 zNJn{5?I!g!T#i`%V(T;`w|9DuH8DFlM$OT3pNQz0lR0Db{BA@ajiY6L6Ybj;!5Ps9 z{!Ta^*Hs1OGWnRAW;wZ`NWY4eS(YRvS!u6;$-P*O;Hd!?m$ohjyklFY$HgZjlX@e_OX_&!7X zhcrdmg*OjbjMOHpb~GAqVhEmIPAKMIAjS!DL^{)ykh3pSXf`QvRb@Wh2& zy~JuvW0u$7b5dnReMKK8&h$Rz_4A4crOMzmbx?6(X)j4kD>Di+^WE{P&oTOl zj$Z%ktar?iBfC#=lA(qifz*_EGuaw)q|9D^!G-QKWZua0j5h}PS6_NQeWcb@>7=EjBvESW~l=Pbu#s z^y*zPrMz+yESx2jngR+dsJ>-`cApG<7)Yv zmsxmCeoZEf%c#l?Q&40qM#kRsh_!LkqZU*o%*TIFl8FhG%8U4KMAKs8uraYUhDGHS zq2O9_vP5^X>#wh_yl@MI@c|4vO{%rRE-0_ulW3y_MBR=Ds^-fbyUT9RkXcM zbrdRo8FtUAt;XuIwe@v?Q5b|Vv2sPTWo**sK%5M)RS9SM{ zy@R5uZ$zGPXo|Fq66VK6lQo>!74j`qI|zrAWBbjhHubJPMT~OiFz;nfnypONwG*bV z7-Aa1=XIsvG!*QctrqdrS2JI3O^5wCq-hzc74=EEZIO}TnS5PH-|ncolPU)pj*LE` z#a+5TqE zp?R3e9zAo0$$F{&a3N3e7cUo$``lB71AFAp&(0->K0w(l z%UZo3c7wW?3f(FV*w&&MiOjk94b?eBUUYtgDK75cjGg4ypE~JAbRyD5tTLZbf@#bU zvF6CT`D&16XxrsOu+T8|MW!)cKJW*v*YWJPbhoTmiq*P2Ir=2D7x_MN@*S;KaX@O$ zsqgb)JJ@Lyr}<)1KYvM5KP9y!CUyTWB=r!f>AKh|>j@1{OX~Yd7V)&K)V+_88aMn* zowl3O=c~C}SKMDY!BFl#$}@WCVympX4?QRCE#u6~x#V$U1P8r6oGTF^slRw4yzW)86RiHd?4?6IrUvZ!s=5{Ygs-bkJ~kx$mbfSdw( zMll(wSmzFAmS@+I1Lcw&n9&`kGRYjaKvoONbFC4_-z-MRID~(7TD_`!D|P?tB7?9z z9Y2H7ekaC7VmHK{V`fdnD659fIQ@5q-&`nmZ_@#T<(~n+=hbLq4?gt^x5sDL4g2e- z3>Par-T!*SilU4rx=N1Jz~7a08(GAmn6LLi7hU9tu17dq4%H%%% z<7ENU{j%x{R3mx*?;$Zh-sN!{KEw6mbE>6L?zp!pcQ-9fl~0i^Y$L>1wJd@*Oj(4N z`L>JR%g4S!Wht~rcve0qp-BsTYnDN3@tF+CfpS`@8xKkQAz$sT$h|K863xSRG+6!vj&?671OJvcWMco z=WU;oo@w$nY`e1i6lnVV!ohPD`{`TRfIeLE`@r1^rBLF4H^67AVrE;gV?x3%$QC* zF*P^{Ete>qx?-rzDA%m0uU}Ck?}o0JT2U=NZFNQH)flpTOk#QB*kwb8EE~(4G0TT6 z89gd6C@^Yt=tVSR-WcfO&$~wi$Ej4Y)(l}Kjas!CS zJ&2CW8AXI*l#wA>xb=&%&pK;Z1_26GRG>>0s4!e;xK=cEg;IU(iuw@?(=+B)hXxf| zuff^9@}h%^jOtlc)$=m?EEpN7sv7Z-YqUW-do!3D+$&mVC3*S9)`%`Z>d2RoFx8L!^!2 zv)}v8Xqc_2EDg~Kd(bgk5jFMPDW$ce0rB#OG 
zmC?ZxBhOBwC9!kejh6m`e3L87rYQMrgpSIpr=*z%5eH^o3W=`}ap+N66p1(>)Z4>XUh*e+WT~!QHLlmo5V7w}=^kdcf_gy8eRDz6;#-tu-msEOw zks@+0JwLLs4nS*0f5W;Nc}k!oc6~=^_l=5KvGFRhzQa{JtCC=Dz%#|}bokQ?GYTrw zh7TPwF?Zm?v6WLL_-lG5I^1?|ue^->!lde&(NnT2t}y!a>t}lXy2INeAu~OtSBlSH zSX)?d(SY8CMFFqhZZp#2vw9}6H#!B~qXR_VpFw={=)rZs7Ngvl;b)K;60M~$u z1W}dI^8!>=H`gAG!FJWm{v6=~>pVm=Aw4?vjW>wC#Oto5y+?g7%V8kQ;F6_0%yURT z8yXRytLR6y$RkW*OjGGqvu0g6FeNv+U|@3Z>II{!{if+J7&da`@IoK%cy?8FZE0z( zS_|>-;a7~Po--)7D&J?8*5&6;7@lVuEb$mIVBiST7{qG&!6ikN!$t-xEf0~|UG1lM zxm2<85F;WXSEmMLxi*h@{W5#!omM<&6dk>3jLgWy;iFAiQJMT<)2ygWN_JYuVdaI# zlfw_tLy4NVrNFUSSWPz4ZaZy2>|Z;*P)&SHAwR>sQ?k1=Jy&JhZ^m#IY!U`~LQaJ{Ti9*=7^Q>{l)CEzZlIE$J``U4_zNlG+0|3}2}ji^t9e@suK+ zTS9V|B`FqzSR-Z|cF7>TGp~O|`LK|eMz!^xYE&&7tMwF>1)J`utJad#TV-&IatejZ zR$gKg7)2`&<0WMiI;Z0qU3>Q3ct%sAsp1*QGl+{WjHZU4jAx|0sIJfbOhvb>)Hfne zIHWw+VMM}2549mZgy&l&pm#pCbe(I}-Tj7V#mn?+@2Rz2HRUzj>Agc!s^OVb-R2AOF5-UXlhPIi7Dt^B+_$%#FCu_Uyj^H38z_aNgZ7Do^z`33s zS+K%ZfXM1{d9&Fx9p7f1%0!>3ExwWk=B2FWzH+hV_f7Qp^>5n(<|u!H?MfDwv$8yr z|6jE6u0X&Qd_kKOmP0yPtv0y9D`RU*9$`R>#VsvMX{KxXeooR6jali!BkTSKe(52a z9(~SUe$hm9nTjscDepkbmdWH%%+wJRN0vd8Wp-(d-0)!FO#yRF&YTjD-)4X6=Qf+) zGsow%tz2pM`eu-3`{nQLq|NvG?6J+@d~6&Q>1etZ5fQIcnS|IF>GcXTt?o$mOdDf5z}RVt*!Phs z3h64}e)FpqWta#vq%d=nwVp^kgiRM*m<{sfj>+dL4>RZcWIe?BJ)#t~4XCr+v8?%z zrR)q1zg$W7_&IoDO%p^^8e;TBWMHo3zsYb81;z?eEBOy8%z&qs^JbXmqU>3Up=rex zYfozf^w;q}u&hb65V5SGXO6taZf~&^Dsm872=-8M#jx^Pdmtep-fs+Wc{Qz1 zWq$Q=!!y9`^^`a>dKYFSC23C&=vy$*<#7hQ9*@iI?c1Ze?Avay*KGunM&$HzIg-3s zJ0ux^BvnX~ApR)A8SmG15-I}*dEhv-h~MP#70axT(K{1P(5dR05j z(1-0{mYT^F-(Jpfunuxqu4wjesf6H@Pp-TtvvywXlRwQZ?KNb`PYENa?a1Q{^OrYV zR&;S~&ea*E=_S?VEBUa$g?#9Q;%<4eij`>jL>I=J29K7ja8V4{;b!ZlfPahe@y8rQV3=}Zo1PnJ~B++MKq<# zLq2*$Bi+@6JlH05jL9ulN83#AuT^ff^0&+5H6rZn`n0xwN{OE3Oe@iSxJAZ1ROw1~ z>sdt%Evn%L#Q+)DWt)0RY}P0-q1T+DH?FL?d7e{v;Vi349XdRXF;r?x8q~e*e;^pcq3y-xAw+e zcl<2ODR%P1Qf4lX|2FD-TboM@GfTm(PPt?uVb+Km?OblVtDN@Z`S?B za?@{rW{#Sto%4*1LA1wh)5dFqnPKr6?qG{%7p(-_HG^p+kHZywH~50h?J+qDCMkH_ zp!AwA*yNDa$a##(<}kt>(>cwT$Sfb7 
z>+yQLeG7Z^&m3GHFsi*~$)G-kskwQE$LGl#U7k@jr@VL7pv1(Aky#@aRUo_6##3lL z-{Wyzs^_-_O&?YO}2 zc0s3O0%TUOYjvK8+QfU5G|bS#%nv!`Q{?G|Jl{c{Y-_c-v>B82HDXSPghm?}Mn-3Z z|548&myIXTm{FV#gS5YkUTDe%cisBbRb@JhM<%7%90O+FIm2p9t0TK&+4)P`f&U?m zaXtc{@qhWO_FNJTaK?>SGKr4l`~>WDY!48rE@g<6QgpS+0yf2-mtfV8UeJL#4duo@ zMV>xW#WVv3_ zUDBZP();BmzG^o;3t4ci|Ep(7`9-DI22a{d_hOIV{+8Oe)3d5rdmhFznz9EgsxBU! z_N>@sO#Ij((vs#by~T*tv2J z70}9EwZO0*rOqOSi|UcWDW++VT~R%>@@nlv$)(dNB{5kB{}`4-k&tqUMR^SgdWuz5`G+~L+-Knm zono|bA9?)@V=$*g5^kq}*H6EFwG@zjWjfRSbr{)BWyB-GDC{SP0-4>eI?Gx|%Bkv2 zne;rXf6mD%i(2m|4bv(odxHLfVb^U}Nw!xp3DL=phcZVR%g$e`G_C5-_Bm1%M#hlJ zQqr46DAHLHWw;}fgzHr6t=iuWH{VOinO6*chX3)()fF|>mF#=8yy4IpI1x$kPfj4c z5(KNme1+P9>Qp;WwYX9~Uyau?)QX@Ml;@W#OQ{T`6%JDq?lB88l_`o@W2()ZanD4V zd!KaojLEl7aN@NrU=5IGRPDI>9nyez)Q_tj1xh8ne-l2GcE@^UE9k63X3A<2){sFQ-G3P7I>YBPw9j!rjU9m@qtb6& zsg6P#IwEOczg}wOGQ@8T_xgO^;fDV&iK*V}uT^D!x4hJunAJ-*w#f*l#r8Cs9T>-H zgx0Fq{EDap9VTd*@B#lcJY{ZeNr^qSB16|7xagXT@}a=jLt{Z)um?3q{zX?`H1XQ; zxi*fm$h+v;iNzdW!9NbNC>kvXS@6y%7D?5)2dpt zMlXqs15+wW-AR3WHd4Db#jqcsnE){fY^XWboQZ0^uapHU?AD>8f?UxclCjH!oTeW;*Ykf2g*3$|TWPF0mz8%0G7@eRvp zrPiU79Nd_1b&-ni(wJR%I8|8{tY(|mYvzdTX1mX$Z)S4Ec7h#Dy`~aqUzFw;nGsdn2PsRxSNxSqf7j$BaRylii@U<@IS#)=7 zOL(O5r2dAC#^5e9K$gvmoc|-NK|$ybMr*wSN%jxzhJUVU+JAR#=Ddli9#%a15__ll z9%S2cz^||KCIyUOoVIA$0%nN}Gr503aEw1d9c7-m$(}$RnQ>D?-cvL#lV>%X=+54+ zJp8u(oRzAj>!o?M*M8-pxVS{&NYDQ|D(CDs{9GB8d&GxQ{$Z7mdq$8WIZUwynTw)-aikE-6Px~;c}2YS9ZiB9+^n;;w2)3Hg~1XjEI zfpbh%#JCyf8bga$j9vaE@`Wv9q%(;Oz|JL@#chn0>WRXRPnF;6GECRw61wpkDsSk@Y*U#mMTwho#5VgQrBm4}aKYs7r;OF8~t3KG)3mIwgdEG{kVoYYIpo1+< z#S~22vkO$W`=aPhEB2bAJpWR>jF+<`F(Rv93!i;~a~9DWDvFH4Oz{L1J3F_(NIgc} z#Lh!|8J30Oq#eHxRbkh|5xcLvv(HIXwPyPn8j-oSu=CM5Ke5f7WwPBT{ljUc2nj8y zP%uOiDfE}rCUh$rZ26&)YEmaPhBUB6F_~P?p#kYFI?JJ77NxE#cc<`1HzkgZbdYNW z?MF^P_JqzGt*8iDg{am4ulqtZhKYnk2Ykgdkt6N?0?tN;rK}6lZ%044JL1Qj|BM56 z&^B?#foHBS-uC2E9Hk&2!=lZ=~hJ}Yh{zWLcB9xGn2qHx3@8Sy-;5upgxp+vJx_Oiw~lusv} zfJ>qv7g&ed7sT95@9$G{%s%I>!#{fcGf@q-d7hXTvSar3Cq!2CretN#K_YQhy39pb 
z=?$F5Dx)?B&Y=~3CDv$_ojD8nzBOm@EWh_BWtFI`?CwyzTs7-ggW{p&lFWqDZ)$Ic7$vl$JAXX%1(T!_Doj;u(slvyIl zF8xs4^{Q-Ex|G>UZ-~1(CjG~e^v^=~s|c>rDJvU0>(rK~(&^<@`d;;%(7Ad<1!gOL zvCl2IoTuR*sU_ zYNKY=^z`BBb5~znmY<><{MGf-dZ@Qq~z9{ z3I_}s1tzI}Vra>-45R^=ASDLmR$zNC7thR#O^lH!O@tNk$a+!^W>-)Eg^ zJa0YQ%4)F{$*nu{fRge;X2O-&zckFOTr8a5`(xD@!Ub4k8jG(sjXxVE@4A#l#i7te zs4Px>^S`&ipE}l{8A?Og}^kUvhHIvQa&&=GU<^yP7>?g%z3c)pb47kt+4f_lV)h9>4*y zbJi@C2nQ^>nX~2;Ug=2&NUrW)S@C4Z@!b7IP1DgjMGLoIB!RvL!7z(5E>OCu_*Gyeh)H}C-wpM+mkujhw1sOdV zJp!3&SvfwVpU2-jJs~qWBZC2wr+3wW-X)`R^C}Y(#`P;5%Q9zcuV@*%?mCxLaMmPs zn#@vP;-Sdw!Oqc=IN@k==u=+sklk;X9*r4K7_qviw#LPbVlZDmYPUIp_c>*6l~^vd z8&JDhap0MH;Xv2ay;H(+$YBGh6+>-$DucK@_U9ju|O#9G7lqpUP?wq0b5mKIq$TqS99 z4WHjNGr{wdpLh~xaDE#Hk9do|?=byYoC5dVd>?^zZMWN-5d2*0k>GU)`*M#*yT%o4 z_5{!bN5B2bBP%RK|FVX*3j@>=PSv`?0#t}?i0qx;qxQ+FKYOBv|9RC^tQCwe(LSi( z&<;c_!8=Nul3xb-q1A9{I}Jr+x|KhO<-zuU7|kGdaip*(v?GnDZ&P+ga^sXGK7UK) zCSIr`CbzK9+{r*Uc>Rw_R@&R*8vj}`#oA{PSx(Ah;(B7~b9t7UnUAgm>Sks>W)bog z!h_7@Ua9`gtTUd>YI55v2?Ya(_nY;OEJdE`^V)v;il6xqe|%nF?WC+awo^CN#ZyM) zyrXRktw&xbWwr7be%?#jocc4Eb{l7y)aje>lV#Q5(9mdK#aU&!U?ei6W^UvMYorcs zGBdA^#>PDexicJk=byK_hN*b_u$x)CGu`W-yLy=X)V&tCVBado`v zW5=+e?1`$Sqx17eFRgk)${71e%hKxV>dZ|4=uvfb{!DZ9D3L!EC9-aux@l8&BjoQu zpS-HEAND0QNVaBatl6lLZzC+!9x}|H69&v*qE6W<9yC~D)RMPpJe8U<=kj04`=1o% z^vZfl-U(NfEjjamzJz>WxQc~HW?_SpaP|zgc8WQ`HSz$O?}zHI+V(BJLD+-@*Jux@ zEJ(yz2wSXuRxV*)OEF?Ih-4aWN9=_Bf#1stB>t)`IA=dUl#djJwtD6PTM>B>T{~|{ zA{m9JyYyF>l}Au@u2gK6=PB9QB}9+Xa>II5W-nYlQ9V*P#WIZ;;-j6^==XB=cDHJz zc;d?8O`*!u>8kOvAil`SR#mqJWAK3fr89VF*dj&q5}QN;`||WW z@$(e1RX-G7K^5(%&MxUM2$QD{o?b0oRzVUOU}+~VqnM*dmJaA&Hd3-2ec63e*ai?V zMkL#u1FxR1T)r3qtQxZwB7s^9&{?+;>k}%ZD!AfCWD;K0BJJ$os+~>zf8Bj~fFo6% ze_rLRs-%+cq?1l3=}tPGr0*-8q&uCy?{m(co*8Ba#u)}=E?{6#4naY8QCL*K#S>R> z1w=rU1#cWL)&q4}QCU|+P&`M$bHz!1pYN-xbkfr^1MKgg-%p3?s+X$wzVG|q`@Zk} zFqY@tD=A;64RoZdy)}sw%~UpA-C20kV7joi@ZkkjQDX&(;!|`bGjw4Gs01eln=sZ0 zOz&w52;uaP@6==x1Nr0l;4Gu=pu~AeGHth9b`>SadgCRFujwKhy&#GvcTb{gP`I0z 
zu)BqU?xyZalVn>@AthaVq@xGL`l#)|Wu3EeQMT9;wf&>uj=+3bM*HhxHmfYgXW%Ho zK&$^OH~|~iz^z1asvc4$cK4HY8>-_q^B}WljUXu2^+=TK>`5eZI!w7_JmrLcLm zyJ&&E3)zoGx3`m4&q3Lbh|CLM>_SM!O}|g@Aiy_864`wtJk!Oq{Uam&aNyu;9(MRD zVA>51vLChxKI^DWoitV@-KVxjYWOE(Bzw*KwRAa{rSE@aB^V#65lFHxO^u+&+y5t* z3$$6A|J0ciB^YTwaH{7iwYbB>PDdeaezdqLAYhfn9Zp<(&OP^nz2791Nu<63f z55wp0$lhJ;tuT?i+q`pr^OCw7+7UfvdcgINMP#mbN1CGK3t|EPFpTk=)b-@+c?{jPM%v*0jTuAwC*_aC$+* zM7;hz6cZ;HNN9OaZPQfbpemJ`&Qa=oP-jCh4fw*!Igm|R=%QyOo7M8K>H4pltR|2o z(PR@Ii=^&S<d2csMEO;BO?IIV>KFwDx$utA#MLq9BX*?vcjYO{rbkIo> zI{u2F$WLiOk=N~T)%P?7WH^=tBCg9!E*#NXj7U2#y54s=%}C zMoWqTTJ<$-QU6=~b2b-ZjT+g?OG$^}=B02sGS7BciGR zPA!s`AiucLm+M(n>7009Or|-$?pASdI*~lRZh(_7z6)@c10c0>^SAkGlO2j6M~SZB zdNa3ww{kfUs!>Jy%p6_kzEq|GvRS{0RS9jUz&6{SAqjdTT-EEn_F8m?6mzcy0G0SEN^Od}**s9zZZju1_9P!$cr zlhok7Xj4+4Pry}gUVt}(rcYKGzv4te%UZAZ@?N98Q}mi}uJ=_9J7xb<)Bxu(&Zx6Q z*|0`8%k&OtwGFtYY|bKa7o{uz<=eVVC-^dNd!^}}i|hO4q1$kvlkKV9=@Lt^>Oy&vN(9!U9=XUnYatPi&glJ zVKDzVuoMH9a(rxhP8u7&FP~ZDM<9nbC*OSv;lFIOPOS$}PpotT8lFLs*eOI%>Z?-S z@lMh>81+et&5&MYACMiv4Lw`W=NgEP_TEX(uPV}CKul=a;-W|oR#h!+=gNo=G$w-4 z`v`3`f!G4w{fyottc_vOB5OG~X<$u#=2`A6l;sZ3svey5J+P<(tM~CNe;lh^Z=&%0 zzr`M`Sb@*jEp{u^I}Z8&XheZcb4`^BunOzT0;F=>of4~na`kBd_i@d>T1uLIva%rp zet~+V<*rAM-^P2m+A44#q$I)6sOs=E%dZ-A%KZW`V#HysWS~Yz(R~C8xk1@AT|28g zd@Y=2jf4AV*B|NwbFv`#JEOt4*P^H2t?l3PkstH?#x+9aHAlrdX$}c@U$+2MF!QPm z$N8@q6WRLuEM+)W#{+%R#^V{o_joT<-9MFIs`j;)`bzs%QaFZERongRLo229u3GvG z&wZqFXM`=%kpf#tBWBcjHh7!Hh{|MW#%dfUF=y6O$S*zVqIKM3@gM}(yPFfZ8)?YD z`Z996f#!SY3tWD4vh;G+*oe&^tg%v}H)~DMZyOmyZ&lqS^;XqQ{+jFd)l%xMTKWu+ z(OIp>{26~&)dteL|DfW4=Upc(K0^6a8QBfV5`G`Yxt2i-GwFx?S4E8$X1wUTlYK*>#fh#Mu0#$f5?DAxF23;Vi5G39Ty*hL%xX<~;= zI8h}JVaA>69)wK=yGBKSQ@zz8Ps&PFPB9545$AL&<`X9@R-CD#$Zt(?o8SHd3#u0JI#E zY}KM-DR0pKMt_}4%KsZj(?bNIm(351t;^;=C)wF|HQuXFqBByoy3yv>wKi|&ZBC%g zKD6nhyo)r^D7$mXaBy}+Emv)rT)+JUj<%Y6oZ5Zj&DVi@&e1Jb41*zT9-re)UvX9b zdmOZWy5`||1UR(z0LM2~I9LPVU{r2+3~)dcE7i?WYdh(4&S*jAmFeNcpQx_FAMwp^ z_^f=qfvC~v%+IMUpr-Glg~xddpN2orYcUJj!D5`JV>eljUE?b3zDq|gwo&v*lpS{a 
zsc{RAnrcRi-$jdnqfYA)c#8g2Ff1VnTw@sFHtoN`EO57k4;F?s5CVpQLIc_s<23o?(^13?d{V1LB4xv z@SyMi$+7)8*2>3o2w9DespCoe!3vy~=rSKqLhP@wY9f4cJdMs_MDzIS_Z*W?Vz_`w z7h$ria2+zw^poEeE1#V!*kl(^%z^Pzqz`O8+KM35l6b2r9Y}6G-d=p51MReKaXnk*_CYK5jB9hzpcQ#FI?eKEoV7uv-1v+HdWm1%u znQUe_s7OPSBXiN#iSy^Wo$MRTA^kgjc!XfoU;&Ij(qL3+DXrfw;K@ZXCM7FX9SQYV z9(Bcjd+U)bNW%y76LW{O`MiHA4>tLNAB@kb>k;Sw^@G;Y%H#r($~34|5zXPFK)na` zqo`sis=QVsNCMkVAw!rwq2YafXymUnn?6+AlmCXOz)@3?#*oi%i%VjUGmieufeq9$ z-zc7J9j?!JLp}9`j3dAp2xt$_Mg=Gc9`yKWZ6+rRKOo9fxX$opzzNDk@>A(d=9Gz( zy(`f}ZgV*Cddy_>4*MXx8tc?fbXA{GJmCTca@7U|XJgucMmep4_Hi7$j!S?CYdeA5 zIk2J2XLKQJ3#KBTtLu!9xWx+Xo~B9mxyR)S zN&0?o4*6EGmisQ4z2HW{Xt!-~o387H1AbO=8V6menKq0(_LiRx?whHsWG~q$9kw%X z1o932h3f<(+Jx_!8@-~rQw6t`1m)b+n)UjM!gf4v{a1%OViTE9ZjW>$kotk;3p0X4 zhDL==%(2I3>zXZ+P+@P9Wu$*=oloB;JFu$rd=9V=jV~~ay56Qt98jzt?Lb8dmMw}r zvo8Yz*68&nyxzN2K)m*ja2FQo1Irhsi-7!wUPyR<%E2+Z0FJ}}&O)jToPU`BI4V_x zjj5mfm)+p6%W*);V0CjKg0@0mbgK6;rc@9@{LV%ogxTdUZrqd=f!1=UWIFM*980ehM%Lr? z0%`YapjKBj0%ag%k8aXI-SkNU_5J}B)PY~A_2NtWb|NH`2#N5T|S(&B4r{<@o^Wj!C8wu-%o7UgxSi=oC>wRw)Bx+K2bZ+Up z!0fPHDcgjrtrkpFhg^{7hxy6v2PgN9I`c2sadtdmD1gDE-6%kjr8@OUGJSb?eIyx> zB#=myL%$v}Ux>{*|g zCVqd%Jg5Fz`&h@n(J0BG!lwY&L}5hO0JxkqzgJVk%$as9zEtLk%Dqi;;fT5V-mrh5 z=NB|Sg|qf|G)^ved;zrWZ&Z-CNXjQqw_kzUXwU%4;~KT`bKZkYm_G%S;1|~5Xw~h7 z;Q*P5IT)oJ@;kR)3XY+KaZMiJ`xNbhYE5Y2_Zl9)qEXbp3U{G}D4lT;^I{BEwgu2r ztfiHW!j0SwiIW7jBmGOrfrnYqviRD;rI|xmBw+A_v&J7}xzQTVrR83?hjnaDGaDyy zFN|C;n_vU`#~IiOY_Li19x4!AUj)1_a(EvQK5BUFN#z#d z^HV36CWOziucLQXVePN<-^;(I{Zeg9{Tk91Ch#GSec;Be@8Trb6^SjFIQS8en{pLS z*+Dp-y4BN3WFS82!!!Z@y!Fx?_X3%iyRP0_A&9cs?2x&NYMoJ*O_iE}lRCJOwsv*+ z`YY`XL&FJT4zTgVPR2eH8y-#AE7Y+hxB%~8wb^oRf#d~ns*Lvc1bdPR_{GAW#8oi# zV%OXXZY)4jqZT2Z=2@?KFf=w!I-n0;Nqpmk1&U6QhaGl$YVU9ubP<%Xeq`^&U`Tg8Ffu$q8|t9cJi9v+7#R&_c274qPw&nIM@Isg-LuU{mS#fzxxm!& z@>C$#ADUSrTJFZ>9}!cb&uR#(FxP=WROvX!E~}Cqw~3Y3W*lhx6N}x-qv61sST03D zTJ%^xX2zOQ^DiK}9^LF09-|S{XpF{f^6*ksXSHJUfvu361TbJQYm2TAAPsX@D%u$^ z3zgzF9H&}8va5g7!A!$==RYxShXfBQ`W^4@nHi{U>kV~=9FpDEvSBFRdrq!n-Aw%L 
z4q26HC>nuIg6`Gutm-n=^O;Jt1DTA@6KZfEZKrkydS-IpSiP1nNo-2j=B7sbL03id zC-VG!AoejjG4*5ifwNOnS4#Dnk+6s)1A(xkFO|YFY?hqn`_%*za!ypt4>EJxmXYz@ zPzK{X8(b*QO@#8haR@XpGE9?*HR`W<6D1K?49Bc5$vBUlB^=cUC1sl(Ov-i;wXc0k zk*)!QXQ{BVcUPbQi5nn~U4w}VNr*-{UigzRhSpRMIYc4zHdI__iKDd5npENXfJAuC zYz-ORTaIFWImDf^(SASaeICMkE=y>a^<+I#xFf=aw$8>t@#GATI$|Do*2DixnUq4A86oL zY2@%kUACvm{*oDI%$TggF{8j@!&{Q07mXjO&9*dWYiqO3Em;f>BMDnD0iGIl9`|J2 zt0xK>hu;C0gBS8&CyA2Vtwtgrv8gIY`8Gn*wE2Ca@E$vP%Ji- zP_L0pq%ISUX6ouPQaHQVnO*YvraQCC-CfJs&gp9Ja&~BU=KMf+eY7hO=!({Z8yjKO z&B0Jhb#+T9)C`g!a19UnNAOXh^-N9}mPEUeMu(rxOBu$<(cG|~qk`Vx(Ob#;A>iM~2O(+T{oKPC0_T6F4}#uO(&0GHPXWDQU49jQ~PXJlmW>Hxye>fu?2P&~8PmbYY7on!zCRXf zsP@>;!fBNC%!M=CSLkZj@_2mginZt}(6)I^x=L$i8(=&QUG4Urg04=krmNT6s%**a z6OCt}tDnb;Y8cwOCS6^?2&Dke>FMe*(AD3oXLGZM?GCf~xJeCp2X>IRslLJUJ}H%ILzMx@5)Q6@l}q$ zgFTyE$JWnU~a}WSo~!L?(UZ!B@!jfVPOCqmnS*8R2aKW?tgOI|IA~FvIrF3ZRb8$rA=i|ob;fn$W%v6I))DJW_kXvMQ;9bH-- zpt8mt{0)vnUdIkzN0;Cz!m&1_DZLKBL9{a=tga)a)q$B~oZ@frQQ)mC@m5l>bkZF4 z;EgbUgO34!gJ!E$NI~Cv4>*QsVFO!l_!RI>Izs4KPRiv>!j{f0c)${WYHX1bkIha^ z&5?)ex-oAxG`a!{)Ya8sF$uE&{+V1&BAA<<%>@%RxtWE^pwl1q`QmY(FY0#&D>1|d zdtr;=NklDUbbQd*t#AXRx~uR+-|5X*D=i+2(R!ZbX!UyU@jl&*NS%PK!m?oX<{$T> zj$mO68wTu+Y8{{uoCjQyqsUp2J6u}ea7hZmZm*>D&(!-N0E>b}vbDOI5F2P1oUR+} zxQ-eL+MH5rPouZeXLp%pt2@*Y8t$l1CL`WDYOxyF$^mMu1KvR*ytjCb;+t(w z>wq`v#lIUD?(sHx8w8jAA=&EvG)kJhHyor<37~(ys28)vSCXx#7HH(jdceAp%95mx zjMj#FZV)l%mU|k~^%Xc6$<$<(%~pF=RduMQ+U>U4QrM}XT9-cU^}fv_25TBJ;mF|q z6@faZB$|wHh7WqG0;1@WZk3%3o$WV^IFqclH;P4!lTA0UaljNHojzfm>nlp-9#t+N ziGap{ry6^(alFyx6StUMwl*7Z+#4QfOpG=K!{J~c9KJu4X{>1q%NJOX>eM8g#{=o6 zcyHZI$ma_NeZCMd;Q@wg&?=}Z@Cz^0@(~3d&heP`-B{&)%4;(2N5JJhRaV=t3BPqX zg#1sU6J7>kKt6)`NsxmoYOtS5nkmAbDe1=p9mA1ouge|mZ|}&4DqUV*WVo{`*5E~E z;VN%!t+%Q+bboDkZ4IQU24A2f6zT~0qO!f&j|Ar9RhR|%f7Pw-%1Sr=NBH!jX9F5k z1QVB7xSXX+SgO2I3dTX;(9~Bq*aRFlk2QLHwoSmSqM`F19}M{jL|+J$=>NgJ$k4?uH#{Y5qrHeUYQ-tX#knz24B&Lh9X820jMF9A8Sn~4?QOBY4jLo(YQZa;4-JcL$nAxvV%ttc%xCUc_1N>Mpy z7w-Wk$%kF_5r@6P@C3BY{2XW+iP9F0tDcvsalL8njv@5xY=Y)&h%z5qxyL9a0ohMb 
zcLhoEC&RY7_6Zy+T2mZ$#pxl%#vTb??qyK3yGt4YB>={R{WvFudH}AXC$#>`@;qP1E0yzFMz}Je}T+1|6GJ@P(05@ap9POQi zwqwxqew~`ir)9@6MdF2c5q0MZvxo|^Xk%0ANQ*i%TK{OS)0lmf>fpu_t|HwUI`by;84y$C24|U3V?3g`12TNwJ|CD+ zeTl--7KB)g8JWq=ZfY^REzFV=!M;|A6E>S*YTt&sY>Gmk zZ0|7P+X=A(?5&u?i*9p^Nw6}bacRkj76puZ2YBo2gqv{s(E`UJF$@}^D!Eg~AK^b5 z<>!Gw+@u(#kRa_&|{kyIz-fgBI_IS^x zKZjjLPnJHq3OAW6eOD#y;z5VhE8B15*Ml|(3e>9|Cqg-H)7Eg2QZ2$F;5KWbEz&4@Fy8oR8>+G?`?{o58yCgPXxWzVX$ zkPWxg$CB)*>?TDq=I;^gwh_fCu+2t?ZL`yYN=)#ML?!eWGT~oXli{d3KUY(K7&q)V z)ES*-P1?;yiM~m(-ohu@!!$Qn)9_DHlHLvcn_lf^gN$!LC;z}c#Lm^`GpAU(N(w6l zG!RZ0iu4DVEa)#T*iZFuEW%l!vT7ZFKV2O+uBt=*M)>q;z7gOJQPwWLNfjOw9zawj z`bRt*Ef*bD?nW-m5yZU?WDX)&dEPQ@?*~7MZ;0a~@rchku95cf3HX`XOsn7Dnn9Kj zdp-VVcRo{{K5J}jd)nu%hXrt9ZhjH$Zat|RATMz4L9LMk0i!IIC5K4Wek($_Q9e9P7r7~o)oSLftzmQh>KF^8ih zdJu1`;Sp(<$+TiV4#apT3Wv$}WMIXQNiU%)BqX!=J;0Wrc zC|=T5k-ZS8A=<_v}!@`WrD3=+H7!jJY^;L{u^U#D#I^wj#+QIXa!8DFPu^FYW0k$bu3e3)K*07955rn%cWo zAj95*qxk^YbbDWg$ypr&WUjuBsr@rC5MzL5Y}_(2xUs^~Hk_CQXdGNXHU`M10U6>B zX-stfghoAD%tXW)9C&8JSMCU1F%U+s`W-;_f-E_3@$s!eU3DvU@r`69JQZjrgLwB! 
zHAtW6kWm*t7rx5R_fe$3!;yx3yIu}A{Hem**bj}}#6O;X#(r^Z&uBP2x(Aj$#n!CO z*k)`om!4_H-h~#vrnYd}8T;rdW-K!GW9|zrr<$?Eo&U$FxbuEdB#zg078i6B=Nv-Z z%eZrV&EC$xhMWWVR&n>3J*!IoN9Cc5HNLRwxr~!m<(w5+pou-t-T{tKKYL7FSR{XN zS%OTEe5n;n(3)}BtSCuQ5^9*sng6+7fDB1gz@FDc4Rp-zDZM9tt`qIurnd(vZdG4S z?j2Z!a&hK`RnP4CQrWHao~RT02lT;eR@-A{*#yJCbj>xA zP0s(;Bw6$yEXZGDw@Gh&qhzzso6N^-PO(9B+K!o#c6>~6NmpGZxs)-{ewv?XjCuk0 zXbIf35vf_hXz}w%YYsEb5Sk!s{Oqezt+Z^J03t|1*?12){P`D)aB9U`$G~yFOhC)} zKqC!ovlxf^Ot@yQSAjFM0KBk1wS~;-yS?x#x<{DrU{A0^m~ULN#Off|nBvOA+9-Qw zZoAAp@MzoyH(?Kx9M3u=!3`hFt+48P7%opZrAO_GVtxlqUMQqOSIX(x1<@Ep^3HA;fCqQ4;R;Y#ig>=f95Xn*k3*E_i2x^m>Ue)wqf zj`?ShZbDM3TWi@}JO<l{0Zkt9Q67-)i6NE@8NA7ZXl*AP`P>;>O+aG+r*(bCYl zePm#{6B^C8+iPWG<%}pZr$1bI0dnD;BiYTF#&buaqrneGlaX{N&>m=yhP&i|)OFTC z>qN%aHXCnW=xkqTZyxFK-Q*eRtdPa3?%s6u0jtd>Sxo6@;=G!g54d|`;Wj_p?5vIX z>KhYLXDulMVYzr8dkGpDIMUFptkAUDoP{s?>+vv^c3Wdp#4dI0nw;I;#lZ0` zO0vc5iP5j=6n`f# znyTA3CObxJk*rR%`<%_SzK~BT`;kq`wUA3fPRV7H!rrR-TBi%M2R4190XT3&^R8iG z#UBT*qI9YkIQ}Rd2??13)KN_Z#DxvgFK~2lEtBeD$Sok+B7@7Jh21vyraisAd)_ps z|IO`NY;9fKm+L=op|y44y#C~D0uD9C#B4G<+W_C_E=e}pBH>WjL5?*J+(c|f8O09P zFe{bTL^V#euX}BO|7+K^>wkAQPqx{DL0j8ob1YRu$^AT4HStPYO^vNGUIQN9VVC^r zn88%m_X+g`23^)u1)J5qPf%1Atsz=IWoL4e0SR^-OzdCvLn4J0Nh`Iu3fh_j-d~{JR&jFS2{Eo7nk$PXd70mCE#HrM9TY zT4!46-!%QJZ*H!~f9$5Fsn*uXrbb++n%46&loM?jFLW9nGG3!g_PQ)YWPz)DM>siE zBvY7fUiyiucbtOu;bG%|x)YO@r1U%Es^_z5OT%H|r5E;JASzre%0CV*d93saP?5s# z!@xhtSCVQg-9FXbN!_A=e}eRw>^MU8AK^{tx2W?iJS^ch0{u3AtDV9PizE9C>W9=! 
z^_OJ3I!#L}J2esh?@J1o3svdxh$0Qk%EhFby;!}H1KEuZ9M7cOftF^I=g@ z7_%tSW}r-3hlrB=O9>D-X~N8 z=g{(VE{zzY9w0bVe!8t# z;m<4U|a``r8u7g(joW$(W~Q8tKcip-wxG>~eOwc3z?kQD+Clret@8?2x3!$k2E^ zJTVw)lq84j>h5V0$>9()C2PnEbwVBHEXF895NAmGa+FH zzWVyUC~k=MG+-Qd8N<-*)67q$pcK<48s7jp8=x`|D+41SylZ8+w8D|L~?D@ld<(9v-^@4B2OnYH{&}xaniz0(PcJcPK<Vl36#J1&}BBAmygljn_3?n!qp=f>8BhqE06 zAR=}LLJ>x;&cC0!w-PO`-%DNF2a}WC9&fCvuDPitB4g0%M*5=3<#cMS-Q$fnhg-X< zV{W9k6}z@&m{pMiL^cAFa%kliD-POj;oYExN&S?gwe|@ji5NNJW-u|!_XxMn&EW9V zc>;D`y+XM!ue)S;_>y(E+(9khp~${>WQVdt?@%OwP$wyAhYaHFcIiHFT`7Z7STzF*98qG1qnwm;hpag`d+3_tY}6X9&3hi;iYs)z-LC; z*^1NheGxG@9Yf=O|8SNQC&ux7yO3$_K6f^X<8q;`$lpl;OY{rQ7;#vCCsn$Tw;|FS zi}ptO5itBPnCgn@&@qrMl z|H?5b98);2jYW5wHN+JxL)|0VJnwyjM%~S*^b=;x^^AI$(0>% zHGT3GITge;*WzdpeJig*?unV3p>fA{^?pnYcxzm$AMhOB;?3!!vsq zf;{Yf$-Z7DaGUm>Mk}jrjWzn?QISd3acJ5)=dG1BHI>m?9o%)J11iXJkj9CuH#jDjRFTk=n8wkjYRzs}-j|Lobd;j&Z$z#Ti8V zed(;>MAslGpW?{Q69IVygy)HsN1*&(KAHI?59b15AI$>183W-|&oR_nh3EFE+cjGF z{LaMEs(#J9>Y1{XmBaCCT3qZ}y5_H1xyNwp$92s@nGa5m4GJ!tDuP5H*uEPOGyXMX z04aS2oa7yF2-qRdZEy|*=V{Ms(hWR1?oJ}>e%bLfo#nI*XF0v+y*SGWR1H%?k$G?= zdoSTIe=f%)zgMK+vs(m&BVdU0$62M-ZWoOfA%EcalCp*JlA)W37Qc}QIc{-*O+mA) z!UJcSTMXldh%hW!O_7@^ z@-g6HF3DJ}H{1Y2J=lyJZUo!XYqQ;OgUyB$u&p=VV0BN>4gG);6lg!Z4R7NaA7U&s zhCcQyAbJmYAN;6)BaG|0 zn!qd;N4n%r(?xELv)NozIcEqyL_r% za4In9o2=lZ0L44`b)#yYpDHQZpjzsO4KDV5;cU`KX|tqEgKow?h1q8{TP&Mx$XxV? 
zKgbHKl6Di@;8YZ6{(h6)B};Qor{&H&tWISf2VfYVm)y>39Ekor+=I~n2wHeVZ2|K< z5mWa@&<^5~dF8rof!%4coAUQj#cU&Piw(Lg!t;_x@hiMiS)5l~mOJjG_Cy)uhd3kl z6X9xj#kQWR7i8^3AN1*@4=&gmr~JYTG8}Hrl9_D`h4S|yvYrp?`E)%di&>}3eBc01 zL(IZC$M`{KO{m6oyU_%9sN%u_4A?i>f9No%{lt=6tCy=M6#KkcvgGd%h1dp*1RJj+ zzxX1Moac$T*m*Rz$4tiCU1-<&K_hZPW|ay8$mPt6@+v4mwV_|U3hNhsr+hq%bFIj@ z7v~pImDLD2At|>|i?=}KNYPigy;+g|oBpyl&=rY!|C#svWH;w+kIj;Z=SZ@( ztFER>AIEzXxNo5jk*+zXzhS$;Qrc*s~QOw_UL#uZhv_=&Puuhblb0RQ$@xC|*H+2tA>}O-q_shU%p}`tyXqp{fRvuC&lX!N1H_A_skK4F&Fm#) zf`gAdW>PA}H@Fg##E?Wa|7AW&`6Rj7Aw!S_E8&nop?Y@p3R8$){vpK@!7H?AOeC{E zDa^p368ALaly*x_5l(p0YeZ+v=CUp=St@M8l*1NX1ibeDl_YkrNt+oPz zud+m&2)V71dVAygTo1o*X-KRefW0RrJF1&%grW3?4j%H}(R$*`=}jG=0Ehw-t`=@V zrXDSV3v-@_w_(8{;|wQt(OATokzFR?DYM5jp+s?f`Ipe0@sM8Jpxq~)!qETo)Q9GAu>OE=qU3h?erg%5+i*^&<|7>C1l&E?N?MfZECbktw1B zC;Geyg8-f%Q=}P(OGG{^mKH1QGgJoSS^iJ93VXf1!j}INe@2fG4JCZqxPv%3X#J4m zL!(Y~yp3-3Lgy%u_`5g?<%3Wo4BH*-=$CL9_G47-n}28m8*ODz>Pbh1`)oLvDk(co zgt4h3BM@X|I&n72{c%?GU_MeY6`O;0iTpGvCl3U*9zC0v#|PyYt6x0apSy70-&|3> zqhGY*s845=wPiY0XA=4kF2=k3Lb|sv1w&bNm(+h?cIH6VE|2eNwUTig%+bO**DQ%1 zar!|2o@jr}X1DcBP4(apCMz&7hEYhvs~S8HjlznErP}pM3e-VPfks;5NNi(?;>1^^ z7nL26o?xgatU#XygdlTqWvaQc*XNKi48jMgEdSd?rnWYdU~c{dE1x1Y)kKX*fb>I5 z1)RCqT=<*tbHqH6S49RsjEHwauC`UtSJ<#rtsG5cXtB7B4aX;xMF&9?IrB@0v^cJm z7RSZzRbz_gCW|peemg9dHDNuZDF2pIzd`mC*d@ zr+$MByt1ENz~3;=6{EVU{Hvn6$`Qb#WVa)9C;iUH#A)#@~*=6r)2>u zBd6d>i9RGU3Ap)s{N}riX0!1wv)|L~skVHC9zJ5J_EcSa?R7qhHCh~2tHZ(?iM-~2 zpg!e)Ao(u&-S2)!GQNu(%eg;Y9+I-eh$3G=Gtx?pXstkya$ zJg3#F)KpZ|D8+gUZx+mj9|&71;=@=H@o_FsW+j-_j7tWg@V)$tI9cQqcn`VbtR-)j z-ovEPqW}9B%I(zXJ*d`z{DA9G!^crWLio7h&5&6w!o#11cU)l@Sy#R*?KLc)#rnW}00W14tuXbW?s7aF)N5yj>k8)^uHo>N^<$qGHbn1j zFe*~8Ao_bG;vaqYq)=sO6V6{f zve=6Np-`obEGyh3R26=H@_T2Mzh^AGJ^#Gn4h^5cMrnLdDF#Zh^cr5r^ZeSyv5|jX zZTEF59tMg~9A{OAQO2Pyt=Ys=gs~#D2A^OyT%gX^()Zd{&sQC5GI@iRjuyYyxq_&Q z?MuCO$tT%$tncw|t1z1@wo#7_?+-Go`0%6Z(^NT=@#MH z0{MCl3i>m$=;3(?&kyn6f5!DR9~SITO7X~KPzLT3M z!w(CO)Az)OXm&NNcN;~~ji9H^TIYVcs&9RTsNoNwm=@tT9JXws!SENuCOrR^!-kaK 
z>`Jz2Ee#a3ec&RwVz}Udeh*I#+*iG_@DwWue&P@b9%&-^NANtAdal?%fh3G$;nguRTw#~B3_F_vO?`4Nwu&spo z2zS6a>Nb4Ac$nbAj4G>XGrh`VI_PVDgCcyh7x3&dUZagOEDW50aYt^rH;4|W?MkzI zzr!hRG`oc37VADURB!Sgvkftc%&-GrFR5Q+4eIisHp&NFW^n0_{ciJ>Hm6-U?y{Np zAOzZK-e{Uu``8))Kx1g2NORVdh;%gwWW>twbQIuGZuOYG04qQ?vS!Hybsk2<` z{+a*;bZr6wgtQT8Tf>+CkF$P*dYUWpT~#YUt@{p#K~d6%PC- zJlE0C&_VyD_%q4~i-W>1*=5Fq>PT^exhg&Ih!0h=;7%I|b71b8Xb^b;CAr-y$^N)M zpg-C~yFVuUvZ4~sm{w;z>aD1BIc40{gOT8~mtAh$SUP)k8IjLk;DH@NVx zKxh8zRCH#&N@up(B%uTOa?9upTGZ(b$?gcB%cOZvU|1tR2676`WRhw)I8)}(s$8~8i^&unY_wF_qAlG&VwKf4r_JhcJC#Zt+2kJ_oKvkYuf}8Kt?5{F%(Mh z8*n!m8+BjOeP+wn{YeysBT=O&FBQp$uPZl}%!72Y4=!vr?94OM0>X<*EKiWjOIwmY?^dwearO^}Isq{qUXihbEU!rYQ z*n?xPX&vD5{|7~Y992MU_-)1jva)=u@ThSCG@{gV{t.d(a,{Zo:()=>c,kt:()=>f});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function s(e){for(var a=1;a=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=n.createContext({}),p=function(e){var a=n.useContext(i),t=a;return e&&(t="function"==typeof e?e(a):s(s({},a),e)),t},c=function(e){var a=p(e.components);return n.createElement(i.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},k=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=p(t),k=o,f=d["".concat(i,".").concat(k)]||d[k]||u[k]||r;return t?n.createElement(f,s(s({ref:a},c),{},{components:t})):n.createElement(f,s({ref:a},c))}));function f(e,a){var 
t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var r=t.length,s=new Array(r);s[0]=k;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[d]="string"==typeof e?e:o,s[1]=l;for(var p=2;p{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var n=t(7462),o=(t(7294),t(3905));const r={},s="@produces basics",l={unversionedId:"guides/Guide_21_Produces_Basics",id:"version-0.6.0/guides/Guide_21_Produces_Basics",title:"@produces basics",description:"You can use @produces decorator to produce messages to Kafka topics.",source:"@site/versioned_docs/version-0.6.0/guides/Guide_21_Produces_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_21_Produces_Basics",permalink:"/docs/0.6.0/guides/Guide_21_Produces_Basics",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@consumes basics",permalink:"/docs/0.6.0/guides/Guide_11_Consumes_Basics"},next:{title:"Defining a partition key",permalink:"/docs/0.6.0/guides/Guide_22_Partition_Keys"}},i={},p=[{value:"Import FastKafka",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a producer function and decorate it with @produces",id:"create-a-producer-function-and-decorate-it-with-produces",level:2},{value:"Instruct the app to start sending HelloWorld messages",id:"instruct-the-app-to-start-sending-helloworld-messages",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic",id:"check-if-the-message-was-sent-to-the-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2}],c={toc:p},d="wrapper";function 
u(e){let{components:a,...t}=e;return(0,o.kt)(d,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"produces-basics"},"@produces basics"),(0,o.kt)("p",null,"You can use ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator to produce messages to Kafka topics."),(0,o.kt)("p",null,"In this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic."),(0,o.kt)("h2",{id:"import-fastkafka"},"Import ",(0,o.kt)("a",{parentName:"h2",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,o.kt)("p",null,"To use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator, frist we need to import the base\nFastKafka app to create our application."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,o.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,o.kt)("p",null,"Next, you need to define the structure of the messages you want to send\nto the topic using ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". 
For the guide\nwe\u2019ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded."),(0,o.kt)("p",null,"Let\u2019s import ",(0,o.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,o.kt)("inlineCode",{parentName:"p"},"msg")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,o.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,o.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,o.kt)("inlineCode",{parentName:"p"},"")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"")," with the actual values of your\nKafka bootstrap server"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "",\n "description": "local demo kafka broker",\n "port": "",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,o.kt)("h2",{id:"create-a-producer-function-and-decorate-it-with-produces"},"Create a producer function and decorate it with ",(0,o.kt)("inlineCode",{parentName:"h2"},"@produces")),(0,o.kt)("p",null,"Let\u2019s create a producer function that will produce ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nto ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return 
HelloWorld(msg=msg)\n")),(0,o.kt)("p",null,"Now you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic."),(0,o.kt)("p",null,"By default, the topic is determined from your function name, the \u201cto","_",'"\nprefix is stripped and what is left over is used as a topic name. I this\ncase, that is ',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("h2",{id:"instruct-the-app-to-start-sending-helloworld-messages"},"Instruct the app to start sending HelloWorld messages"),(0,o.kt)("p",null,"Let\u2019s use ",(0,o.kt)("inlineCode",{parentName:"p"},"@run_in_background")," decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"Your app code should look like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "",\n "description": "local demo kafka broker",\n "port": "",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"run-the-app"},"Run the 
app"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'script_file = "producer_example.py"\ncmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"\nmd(\n f"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```"\n)\n')),(0,o.kt)("p",null,"Now we can run the app. Copy the code above in producer_example.py and\nrun it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n")),(0,o.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 
'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n")),(0,o.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic"},"Check if the message was sent to the Kafka topic"),(0,o.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic. In your terminal run:'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh -topic=hello_world --from-beginning -bootstrap-server=\n")),(0,o.kt)("p",null,'You should see the {\u201cmsg": \u201cHello world!"} messages in your topic.'),(0,o.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,o.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are sending\nthe message to, this is because the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default \u201cto","_",'" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("p",null,'!!! warn "New topics"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Kafka producers and application startup will fail if the topics you are producing to don't yet exist. 
Before running the app, make sure that the topics are created.\n")),(0,o.kt)("p",null,"You can choose your custom prefix by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nproduces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(prefix="send_to_")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in produces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(topic="my_special_topic")\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("h2",{id:"message-data"},"Message data"),(0,o.kt)("p",null,"The return value from your function will be translated JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app."),(0,o.kt)("p",null,"In this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. 
In the from of:\n",(0,o.kt)("inlineCode",{parentName:"p"},'b\'{"msg": "Hello world!"}\'')))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0030fd86.cda43b2b.js b/assets/js/0030fd86.cda43b2b.js new file mode 100644 index 0000000..ce9fba7 --- /dev/null +++ b/assets/js/0030fd86.cda43b2b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1887],{7619:a=>{a.exports=JSON.parse('{"pluginId":"default","version":"0.5.0","label":"0.5.0","banner":"unmaintained","badge":true,"noIndex":false,"className":"docs-version-0.5.0","isLast":false,"docsSidebars":{"tutorialSidebar":[{"type":"link","label":"FastKafka","href":"/docs/0.5.0/","docId":"index"},{"type":"category","label":"Guides","items":[{"type":"category","label":"Writing services","items":[{"type":"link","label":"@consumes basics","href":"/docs/0.5.0/guides/Guide_11_Consumes_Basics","docId":"guides/Guide_11_Consumes_Basics"},{"type":"link","label":"@produces basics","href":"/docs/0.5.0/guides/Guide_21_Produces_Basics","docId":"guides/Guide_21_Produces_Basics"},{"type":"link","label":"Defining a partition key","href":"/docs/0.5.0/guides/Guide_22_Partition_Keys","docId":"guides/Guide_22_Partition_Keys"},{"type":"link","label":"Lifespan Events","href":"/docs/0.5.0/guides/Guide_05_Lifespan_Handler","docId":"guides/Guide_05_Lifespan_Handler"},{"type":"link","label":"Encoding and Decoding Kafka Messages with FastKafka","href":"/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","docId":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Testing","items":[{"type":"link","label":"Using Redpanda to test FastKafka","href":"/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka","docId":"guides/Guide_31_Using_redpanda_to_test_fastkafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Documentation 
generation","items":[{"type":"link","label":"Deploy FastKafka docs to GitHub Pages","href":"/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow","docId":"guides/Guide_04_Github_Actions_Workflow"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Deployment","items":[{"type":"link","label":"Deploying FastKafka using Docker","href":"/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka","docId":"guides/Guide_30_Using_docker_to_deploy_fastkafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Benchmarking","items":[{"type":"link","label":"Benchmarking FastKafka app","href":"/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka","docId":"guides/Guide_06_Benchmarking_FastKafka"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"items":[{"type":"link","label":"FastKafka","href":"/docs/0.5.0/api/fastkafka/","docId":"api/fastkafka/FastKafka"},{"type":"link","label":"KafkaEvent","href":"/docs/0.5.0/api/fastkafka/KafkaEvent","docId":"api/fastkafka/KafkaEvent"},{"items":[{"type":"link","label":"ApacheKafkaBroker","href":"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker","docId":"api/fastkafka/testing/ApacheKafkaBroker"},{"type":"link","label":"LocalRedpandaBroker","href":"/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker","docId":"api/fastkafka/testing/LocalRedpandaBroker"},{"type":"link","label":"Tester","href":"/docs/0.5.0/api/fastkafka/testing/Tester","docId":"api/fastkafka/testing/Tester"}],"label":"testing","type":"category","collapsed":true,"collapsible":true}],"label":"API","type":"category","collapsed":true,"collapsible":true},{"type":"category","label":"CLI","items":[{"type":"link","label":"fastkafka","href":"/docs/0.5.0/cli/fastkafka","docId":"cli/fastkafka"},{"type":"link","label":"run_fastkafka_server_process","href":"/docs/0.5.0/cli/run_fastkafka_server_process","docId":"cli/run_fastkafka_server_process"}],"collapsed":true,"collapsible":true},{"type":"link","label":"Release 
notes","href":"/docs/0.5.0/CHANGELOG","docId":"CHANGELOG"}]},"docs":{"api/fastkafka/encoder/avsc_to_pydantic":{"id":"api/fastkafka/encoder/avsc_to_pydantic","title":"avsc_to_pydantic","description":"fastkafka.encoder.avsctopydantic {fastkafka.encoder.avsctopydantic}"},"api/fastkafka/FastKafka":{"id":"api/fastkafka/FastKafka","title":"FastKafka","description":"fastkafka.FastKafka {fastkafka.FastKafka}","sidebar":"tutorialSidebar"},"api/fastkafka/KafkaEvent":{"id":"api/fastkafka/KafkaEvent","title":"KafkaEvent","description":"fastkafka.KafkaEvent {fastkafka.KafkaEvent}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/ApacheKafkaBroker":{"id":"api/fastkafka/testing/ApacheKafkaBroker","title":"ApacheKafkaBroker","description":"fastkafka.testing.ApacheKafkaBroker {fastkafka.testing.ApacheKafkaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/LocalRedpandaBroker":{"id":"api/fastkafka/testing/LocalRedpandaBroker","title":"LocalRedpandaBroker","description":"fastkafka.testing.LocalRedpandaBroker {fastkafka.testing.LocalRedpandaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/Tester":{"id":"api/fastkafka/testing/Tester","title":"Tester","description":"fastkafka.testing.Tester {fastkafka.testing.Tester}","sidebar":"tutorialSidebar"},"CHANGELOG":{"id":"CHANGELOG","title":"Release notes","description":"0.5.0","sidebar":"tutorialSidebar"},"cli/fastkafka":{"id":"cli/fastkafka","title":"fastkafka","description":"Usage:","sidebar":"tutorialSidebar"},"cli/run_fastkafka_server_process":{"id":"cli/run_fastkafka_server_process","title":"run_fastkafka_server_process","description":"Usage:","sidebar":"tutorialSidebar"},"guides/Guide_00_FastKafka_Demo":{"id":"guides/Guide_00_FastKafka_Demo","title":"FastKafka tutorial","description":"FastKafka is a powerful and easy-to-use Python"},"guides/Guide_01_Intro":{"id":"guides/Guide_01_Intro","title":"Intro","description":"This tutorial will show you how to use FastKafkaAPI, step 
by"},"guides/Guide_02_First_Steps":{"id":"guides/Guide_02_First_Steps","title":"First Steps","description":"Creating a simple Kafka consumer app"},"guides/Guide_03_Authentication":{"id":"guides/Guide_03_Authentication","title":"Authentication","description":"TLS Authentication"},"guides/Guide_04_Github_Actions_Workflow":{"id":"guides/Guide_04_Github_Actions_Workflow","title":"Deploy FastKafka docs to GitHub Pages","description":"Getting started","sidebar":"tutorialSidebar"},"guides/Guide_05_Lifespan_Handler":{"id":"guides/Guide_05_Lifespan_Handler","title":"Lifespan Events","description":"Did you know that you can define some special code that runs before and","sidebar":"tutorialSidebar"},"guides/Guide_06_Benchmarking_FastKafka":{"id":"guides/Guide_06_Benchmarking_FastKafka","title":"Benchmarking FastKafka app","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka":{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","title":"Encoding and Decoding Kafka Messages with FastKafka","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_11_Consumes_Basics":{"id":"guides/Guide_11_Consumes_Basics","title":"@consumes basics","description":"You can use @consumes decorator to consume messages from Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_21_Produces_Basics":{"id":"guides/Guide_21_Produces_Basics","title":"@produces basics","description":"You can use @produces decorator to produce messages to Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_22_Partition_Keys":{"id":"guides/Guide_22_Partition_Keys","title":"Defining a partition key","description":"Partition keys are used in Apache Kafka to determine which partition a","sidebar":"tutorialSidebar"},"guides/Guide_30_Using_docker_to_deploy_fastkafka":{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","title":"Deploying FastKafka using Docker","description":"Building a Docker 
Image","sidebar":"tutorialSidebar"},"guides/Guide_31_Using_redpanda_to_test_fastkafka":{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","title":"Using Redpanda to test FastKafka","description":"What is FastKafka?","sidebar":"tutorialSidebar"},"index":{"id":"index","title":"FastKafka","description":"Effortless Kafka integration for your web services","sidebar":"tutorialSidebar"}}}')}}]); \ No newline at end of file diff --git a/assets/js/036db789.b29b3dc1.js b/assets/js/036db789.b29b3dc1.js new file mode 100644 index 0000000..1edc0c8 --- /dev/null +++ b/assets/js/036db789.b29b3dc1.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6590],{3905:(e,a,n)=>{n.d(a,{Zo:()=>k,kt:()=>m});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},k=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},f=t.forwardRef((function(e,a){var n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(n),f=o,m=c["".concat(i,".").concat(f)]||c[f]||u[f]||r;return n?t.createElement(m,s(s({ref:a},k),{},{components:n})):t.createElement(m,s({ref:a},k))}));function m(e,a){var 
n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var r=n.length,s=new Array(r);s[0]=f;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var p=2;p{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var t=n(7462),o=(n(7294),n(3905));const r={},s="First Steps",l={unversionedId:"guides/Guide_02_First_Steps",id:"guides/Guide_02_First_Steps",title:"First Steps",description:"Creating a simple Kafka consumer app",source:"@site/docs/guides/Guide_02_First_Steps.md",sourceDirName:"guides",slug:"/guides/Guide_02_First_Steps",permalink:"/docs/next/guides/Guide_02_First_Steps",draft:!1,tags:[],version:"current",frontMatter:{}},i={},p=[{value:"Creating a simple Kafka consumer app",id:"creating-a-simple-kafka-consumer-app",level:2},{value:"Sending first message to your consumer",id:"sending-first-message-to-your-consumer",level:2},{value:"Creating a hello Kafka producer",id:"creating-a-hello-kafka-producer",level:2},{value:"Recap",id:"recap",level:2}],k={toc:p},c="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(c,(0,t.Z)({},k,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"first-steps"},"First Steps"),(0,o.kt)("h2",{id:"creating-a-simple-kafka-consumer-app"},"Creating a simple Kafka consumer app"),(0,o.kt)("p",null,"For our first demo we will create the simplest possible Kafka consumer\nand run it using \u2018fastkafka run\u2019 command."),(0,o.kt)("p",null,"The consumer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Connect to the Kafka Broker we setup in the Intro guide")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Listen to the hello topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Write any message received from the hello topic to stdout"))),(0,o.kt)("p",null,"To create the consumer, first, create a file 
named"),(0,o.kt)("b",null,"hello_kafka_consumer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n print(f"Got data, msg={msg.msg}", flush=True)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,'!!! warning "Remember to flush"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. 
To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n")),(0,o.kt)("p",null,"To run this consumer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
subscribed.\n[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. \nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n")),(0,o.kt)("p",null,"Now you can interact with your consumer, by sending the messages to the\nsubscribed \u2018hello\u2019 topic, don\u2019t worry, we will cover this in the next\nstep of this guide."),(0,o.kt)("h2",{id:"sending-first-message-to-your-consumer"},"Sending first message to your consumer"),(0,o.kt)("p",null,"After we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly."),(0,o.kt)("p",null,"If you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic."),(0,o.kt)("p",null,"First, connect to your running kafka broker by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"docker run -it kafka /bin/bash\n")),(0,o.kt)("p",null,"Then, when connected to the container, 
run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n")),(0,o.kt)("p",null,"This will open an interactive connection to the hello topic, now you can\nwrite your mesages to the topic and they will be consumed by our\nconsumer."),(0,o.kt)("p",null,"In the shell, type:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'{"msg":"hello"}\n')),(0,o.kt)("p",null,"and press enter. This will send a hello message to the topic which will\nbe read by our running consumer and outputed to stdout."),(0,o.kt)("p",null,"Check the output of your consumer (terminal where you ran the \u2018fastkafka\nrun\u2019 command) and confirm that your consumer has read the Kafka message.\nYou shoud see something like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"Got data, msg=hello\n")),(0,o.kt)("h2",{id:"creating-a-hello-kafka-producer"},"Creating a hello Kafka producer"),(0,o.kt)("p",null,"Consuming messages is only a part of this Library functionality, the\nother big part is producing the messages. 
So, let\u2019s create our first\nkafka producer which will send it\u2019s greetings to our consumer\nperiodically."),(0,o.kt)("p",null,"The producer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Connect to the Kafka Broker we setup in the Intro guide"),(0,o.kt)("li",{parentName:"ol"},"Connect to the hello topic"),(0,o.kt)("li",{parentName:"ol"},"Periodically send a message to the hello world topic")),(0,o.kt)("p",null,"To create the producer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_producer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n logger.info(f"Producing: {msg}")\n return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello(HelloKafkaMsg(msg="hello"))\n await asyncio.sleep(1)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. 
in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,"To run this producer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: 
Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n")),(0,o.kt)("p",null,"Now, while the producer is running, it will send a HelloKafkaMsg every\nsecond to the hello kafka topic. 
If your consumer is still running, you\nshould see the messages appear in its log."),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka consumer using FastKafka"),(0,o.kt)("li",{parentName:"ol"},"Sent a message to our consumer trough Kafka"),(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka producer using FastKafka")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/04d4af82.f9ae366c.js b/assets/js/04d4af82.f9ae366c.js new file mode 100644 index 0000000..446bc7b --- /dev/null +++ b/assets/js/04d4af82.f9ae366c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5775],{3905:(t,e,a)=>{a.d(e,{Zo:()=>p,kt:()=>c});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function i(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var s=n.createContext({}),d=function(t){var e=n.useContext(s),a=e;return t&&(a="function"==typeof t?t(e):i(i({},e),t)),a},p=function(t){var e=d(t.components);return n.createElement(s.Provider,{value:e},t.children)},m="mdxType",k={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},u=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,s=t.parentName,p=o(t,["components","mdxType","originalType","parentName"]),m=d(a),u=r,c=m["".concat(s,".").concat(u)]||m[u]||k[u]||l;return 
a?n.createElement(c,i(i({ref:e},p),{},{components:a})):n.createElement(c,i({ref:e},p))}));function c(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,i=new Array(l);i[0]=u;var o={};for(var s in e)hasOwnProperty.call(e,s)&&(o[s]=e[s]);o.originalType=t,o[m]="string"==typeof t?t:r,i[1]=o;for(var d=2;d{a.r(e),a.d(e,{assets:()=>s,contentTitle:()=>i,default:()=>k,frontMatter:()=>l,metadata:()=>o,toc:()=>d});var n=a(7462),r=(a(7294),a(3905));const l={},i=void 0,o={unversionedId:"api/fastkafka/testing/Tester",id:"api/fastkafka/testing/Tester",title:"Tester",description:"fastkafka.testing.Tester {fastkafka.testing.Tester}",source:"@site/docs/api/fastkafka/testing/Tester.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/Tester",permalink:"/docs/next/api/fastkafka/testing/Tester",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LocalRedpandaBroker",permalink:"/docs/next/api/fastkafka/testing/LocalRedpandaBroker"},next:{title:"fastkafka",permalink:"/docs/next/cli/fastkafka"}},s={},d=[{value:"fastkafka.testing.Tester",id:"fastkafka.testing.Tester",level:2},{value:"init",id:"fastkafka._application.tester.Tester.init",level:3},{value:"benchmark",id:"fastkafka._application.app.FastKafka.benchmark",level:3},{value:"consumes",id:"fastkafka._application.app.FastKafka.consumes",level:3},{value:"create_docs",id:"fastkafka._application.app.FastKafka.create_docs",level:3},{value:"create_mocks",id:"fastkafka._application.app.FastKafka.create_mocks",level:3},{value:"fastapi_lifespan",id:"fastkafka._application.app.FastKafka.fastapi_lifespan",level:3},{value:"get_topics",id:"fastkafka._application.app.FastKafka.get_topics",level:3},{value:"is_started",id:"fastkafka._application.app.FastKafka.is_started",level:3},{value:"produces",id:"fastkafka._application.app.FastKafka.produces",level:3},{value:"run_in_background",id:"fastkafka._application.app.FastKafka.run_in_background",level:3},{value:"set_kafka
_broker",id:"fastkafka._application.app.FastKafka.set_kafka_broker",level:3},{value:"using_external_broker",id:"fastkafka._application.tester.Tester.using_external_broker",level:3},{value:"using_inmemory_broker",id:"fastkafka._application.tester.Tester.using_inmemory_broker",level:3}],p={toc:d},m="wrapper";function k(t){let{components:e,...a}=t;return(0,r.kt)(m,(0,n.Z)({},p,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.testing.Tester"},"fastkafka.testing.Tester"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/tester.py#L48-L197",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("h3",{id:"fastkafka._application.tester.Tester.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/tester.py#L51-L77",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self, app, use_in_memory_broker=True\n)\n")),(0,r.kt)("p",null,"Mirror-like object for testing a FastKafka application"),(0,r.kt)("p",null,"Can be used as context manager"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"app")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[fastkafka.FastKafka, List[fastkafka.FastKafka]]")),(0,r.kt)("td",{parentName:"tr",align:null},"The FastKafka application to be 
tested."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"use_in_memory_broker")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"Whether to use an in-memory broker for testing or not."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.benchmark"},"benchmark"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L1113-L1164",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"benchmark(\n self, interval=1, sliding_window_size=None\n)\n")),(0,r.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"interval")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[int, datetime.timedelta]")),(0,r.kt)("td",{parentName:"tr",align:null},"Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. 
default: 1 - one second"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sliding_window_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[int]")),(0,r.kt)("td",{parentName:"tr",align:null},"The size of the sliding window to use to calculateaverage throughput. default: None - By default average throughput isnot calculated"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.consumes"},"consumes"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L475-L560",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"consumes(\n self,\n topic=None,\n decoder='json',\n executor=None,\n brokers=None,\n prefix='on_',\n description=None,\n loop=None,\n bootstrap_servers='localhost',\n client_id='aiokafka-0.8.1',\n group_id=None,\n key_deserializer=None,\n value_deserializer=None,\n fetch_max_wait_ms=500,\n fetch_max_bytes=52428800,\n fetch_min_bytes=1,\n max_partition_fetch_bytes=1048576,\n request_timeout_ms=40000,\n retry_backoff_ms=100,\n auto_offset_reset='latest',\n enable_auto_commit=True,\n auto_commit_interval_ms=5000,\n check_crcs=True,\n metadata_max_age_ms=300000,\n partition_assignment_strategy=(,),\n max_poll_interval_ms=300000,\n rebalance_timeout_ms=None,\n session_timeout_ms=10000,\n heartbeat_interval_ms=3000,\n consumer_timeout_ms=200,\n max_poll_records=None,\n ssl_context=None,\n security_protocol='PLAINTEXT',\n api_version='auto',\n exclude_internal_topics=True,\n connections_max_idle_ms=540000,\n isolation_level='read_uncommitted',\n sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n 
sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n)\n")),(0,r.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,r.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Kafka topic that the consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefix"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"decoder")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. 
It also accepts custom decoder function."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'json'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"executor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},'Type of executor to choose for consuming tasks. Avaliable optionsare "SequentialExecutor" and "DynamicTaskExecutor". The default option is"SequentialExecutor" which will execute the consuming tasks sequentially.If the consuming tasks have high latency it is recommended to use"DynamicTaskExecutor" which will wrap the consuming functions into tasksand run them in on asyncio loop in background. This comes with a cost ofincreased overhead so use it only in cases when your consume functions havehigh latency such as database queries or some other type of networking.'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"prefix")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'Prefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: "on_". 
If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'on_'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional description of the consuming function async docs.If not provided, consuming function ",(0,r.kt)("strong",{parentName:"td"},"doc")," attr will be used."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a ",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," string (or list of",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. 
If no servers arespecified, will default to ",(0,r.kt)("inlineCode",{parentName:"td"},"localhost:9092"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'localhost'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~.consumer.group_coordinator.GroupCoordinator"),"for logging with respect to consumer group administration. Default:",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-{version}")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'aiokafka-0.8.1'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"group_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"name of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. 
If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message key and returns a deserialized key."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message value and returns a deserialized value."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_min_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Minimum amount of data the server shouldreturn for a fetch request, otherwise wait up to",(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of data the server shouldreturn for a fetch request. 
This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"52428800"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"500"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_partition_fetch_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of dataper-partition the server will return. The maximum total memoryused for a request ",(0,r.kt)("inlineCode",{parentName:"td"},"= #partitions * max_partition_fetch_bytes"),".This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. 
Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_records")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum number of records returned in asingle call to :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),". Defaults ",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", no limit."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client request timeout in milliseconds.Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. 
Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_offset_reset")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A policy for resetting offsets on:exc:",(0,r.kt)("inlineCode",{parentName:"td"},".OffsetOutOfRangeError")," errors: ",(0,r.kt)("inlineCode",{parentName:"td"},"earliest")," will move to the oldestavailable message, ",(0,r.kt)("inlineCode",{parentName:"td"},"latest")," will move to the most recent, and",(0,r.kt)("inlineCode",{parentName:"td"},"none")," will raise an exception so you can handle this case.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"latest"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'latest'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_auto_commit")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"If true the consumer's offset will beperiodically committed in the background. Default: True."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_commit_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"milliseconds between automaticoffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"5000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"check_crcs")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Automatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partition_assignment_strategy")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"List of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. 
Default: ","[:class:",(0,r.kt)("inlineCode",{parentName:"td"},".RoundRobinPartitionAssignor"),"]"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"(,)"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum allowed time between calls toconsume messages (e.g., :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),"). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See ",(0,r.kt)("inlineCode",{parentName:"td"},"KIP-62"),"_ for moreinformation. Default 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"rebalance_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to ",(0,r.kt)("inlineCode",{parentName:"td"},"max.poll.interval.ms")," configuration,but as ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka")," will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:",(0,r.kt)("inlineCode",{parentName:"td"},".ConsumerRebalanceListener")," to delay rebalacing. 
Defaultsto ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client group session and failure detectiontimeout. The consumer sends periodic heartbeats(",(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.If no hearts are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe ",(0,r.kt)("strong",{parentName:"td"},"broker")," configuration properties",(0,r.kt)("inlineCode",{parentName:"td"},"group.min.session.timeout.ms")," and ",(0,r.kt)("inlineCode",{parentName:"td"},"group.max.session.timeout.ms"),".Default: 10000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"10000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms"),", but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. 
Default: 3000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"3000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"consumer_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"maximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"200"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.:class:",(0,r.kt)("inlineCode",{parentName:"td"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For more information see:ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),". Default: None."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"exclude_internal_topics")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Whether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying ",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"isolation_level")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Controls how to read messages writtentransactionally.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed"),", :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returntransactional messages which have been committed.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")," (the default), :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, in",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," mode, :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," the seek_to_end method willreturn the LSO. See method docs below. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'read_uncommitted'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Valid values are:",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: 
None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_oauth_token_provider")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"OAuthBearer token provider instance. (See :mod:",(0,r.kt)("inlineCode",{parentName:"td"},"kafka.oauth.abstract"),").Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], 
None]]]")),(0,r.kt)("td",{parentName:"tr",align:null},": A function returning the same function")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.create_docs"},"create_docs"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L943-L969",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"create_docs(\n self\n)\n")),(0,r.kt)("p",null,"Create the asyncapi documentation based on the configured consumers and producers."),(0,r.kt)("p",null,"This function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers."),(0,r.kt)("p",null,"Note:\nThe asyncapi documentation is saved to the location specified by the ",(0,r.kt)("inlineCode",{parentName:"p"},"_asyncapi_path"),"\nattribute of the FastKafka instance."),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.create_mocks"},"create_mocks"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L1031-L1109",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"create_mocks(\n self\n)\n")),(0,r.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.fastapi_lifespan"},"fastapi_lifespan"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L1168-L1187",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"fastapi_lifespan(\n self, kafka_broker_name\n)\n")),(0,r.kt)("p",null,"Method for managing the lifespan of a FastAPI application with a specific Kafka 
broker."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_broker_name")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The name of the Kafka broker to start FastKafka"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Lifespan function to use for initializing FastAPI")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.get_topics"},"get_topics"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L668-L677",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"get_topics(\n self\n)\n")),(0,r.kt)("p",null,"Get all topics for both producing and 
consuming."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Iterable[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"A set of topics for both producing and consuming.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.is_started"},"is_started"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L309-L320",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"@property\nis_started(\n self\n)\n")),(0,r.kt)("p",null,"Property indicating whether the FastKafka object is started."),(0,r.kt)("p",null,"The is_started property indicates if the FastKafka object is currently\nin a started state. 
This implies that all background tasks, producers,\nand consumers have been initiated, and the object is successfully connected\nto the Kafka broker."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"True if the object is started, False otherwise.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.produces"},"produces"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L585-L664",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"produces(\n self,\n topic=None,\n encoder='json',\n prefix='to_',\n brokers=None,\n description=None,\n loop=None,\n bootstrap_servers='localhost',\n client_id=None,\n metadata_max_age_ms=300000,\n request_timeout_ms=40000,\n api_version='auto',\n acks=,\n key_serializer=None,\n value_serializer=None,\n compression_type=None,\n max_batch_size=16384,\n partitioner=,\n max_request_size=1048576,\n linger_ms=0,\n send_backoff_ms=100,\n retry_backoff_ms=100,\n security_protocol='PLAINTEXT',\n ssl_context=None,\n connections_max_idle_ms=540000,\n enable_idempotence=False,\n transactional_id=None,\n transaction_timeout_ms=60000,\n sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n)\n")),(0,r.kt)("p",null,"Decorator registering the callback called when delivery report for a produced message is received"),(0,r.kt)("p",null,"This function decorator 
is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"encoder")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, Callable[[pydantic.main.BaseModel], bytes]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. 
It also accepts custom encoder function."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'json'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"prefix")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'Prefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: "to_". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'to_'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional description of the producing function async docs.If not provided, producing function ",(0,r.kt)("strong",{parentName:"td"},"doc")," attr will be 
used."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a ",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," string or list of",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list. It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to ",(0,r.kt)("inlineCode",{parentName:"td"},"localhost:9092"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'localhost'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. 
This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-producer-#")," (appended with a unique numberper instance)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied keys to bytesIf not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as ",(0,r.kt)("inlineCode",{parentName:"td"},"f(key),")," should return:class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied messagevalues to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),". 
If not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as",(0,r.kt)("inlineCode",{parentName:"td"},"f(value)"),", should return :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"acks")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"one of ",(0,r.kt)("inlineCode",{parentName:"td"},"0"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common:",(0,r.kt)("em",{parentName:"td"}," ",(0,r.kt)("inlineCode",{parentName:"em"},"0"),": Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1.")," ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),": The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),": The broker leader will wait for the full set of in-sync replicas to acknowledge the record. 
This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=1"),". If ",(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")," is:data:",(0,r.kt)("inlineCode",{parentName:"td"},"True")," defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=all")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},""))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"compression_type")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The compression type for all data generated bythe producer. Valid values are ",(0,r.kt)("inlineCode",{parentName:"td"},"gzip"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"snappy"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"lz4"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"zstd"),"or :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),".Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). 
Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_batch_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum size of buffered data per partition.After this amount :meth:",(0,r.kt)("inlineCode",{parentName:"td"},"send")," coroutine will block until batch isdrained.Default: 16384"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"16384"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms"),", producer will wait ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms - process_time"),".Default: 0 (i.e. no delay)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"0"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partitioner")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Callable used to determine which partitioneach message is assigned to. 
Called (after key serialization):",(0,r.kt)("inlineCode",{parentName:"td"},"partitioner(key_bytes, all_partitions, available_partitions)"),".The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", the message is delivered to a random partition(filtered to partitions with available leaders only, if possible)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},""))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_request_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. 
Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Produce request timeout in milliseconds.As it's sent as part of:class:",(0,r.kt)("inlineCode",{parentName:"td"},"~kafka.protocol.produce.ProduceRequest")," (it's a blockingcall), maximum waiting time can be up to ",(0,r.kt)("inlineCode",{parentName:"td"},"2 *request_timeout_ms"),".Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For moreinformation see :ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"When set to :data:",(0,r.kt)("inlineCode",{parentName:"td"},"True"),", the producer willensure that exactly one copy of each message is written in thestream. If :data:",(0,r.kt)("inlineCode",{parentName:"td"},"False"),", producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:",(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")," will be thrown.New in version 0.5.0."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Valid valuesare: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], 
fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]")),(0,r.kt)("td",{parentName:"tr",align:null},": A function returning the same function")))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,r.kt)("td",{parentName:"tr",align:null},"when needed")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.run_in_background"},"run_in_background"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L681-L714",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"run_in_background(\n self\n)\n")),(0,r.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,r.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,r.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is 
triggered."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]")),(0,r.kt)("td",{parentName:"tr",align:null},"A decorator function that takes a background task as an input and stores it to be run in the backround.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.set_kafka_broker"},"set_kafka_broker"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L322-L338",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"set_kafka_broker(\n self, kafka_broker_name\n)\n")),(0,r.kt)("p",null,"Sets the Kafka broker to start FastKafka with"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_broker_name")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The name of the Kafka broker to start 
FastKafka"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,r.kt)("td",{parentName:"tr",align:null},"If the provided kafka_broker_name is not found in dictionary of kafka_brokers")))),(0,r.kt)("h3",{id:"fastkafka._application.tester.Tester.using_external_broker"},"using_external_broker"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/tester.py#L131-L151",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"using_external_broker(\n self, bootstrap_servers_id=None\n)\n")),(0,r.kt)("p",null,"Tester context manager for using external broker"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers_id")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"The bootstrap server of 
aplications."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"AsyncGenerator[ForwardRef('Tester'), None]")),(0,r.kt)("td",{parentName:"tr",align:null},"self or None")))),(0,r.kt)("h3",{id:"fastkafka._application.tester.Tester.using_inmemory_broker"},"using_inmemory_broker"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/tester.py#L154-L174",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"using_inmemory_broker(\n self, bootstrap_servers_id=None\n)\n")),(0,r.kt)("p",null,"Tester context manager for using in-memory broker"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers_id")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"The bootstrap server of 
aplications."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"AsyncGenerator[ForwardRef('Tester'), None]")),(0,r.kt)("td",{parentName:"tr",align:null},"self or None")))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0582779b.aec163df.js b/assets/js/0582779b.aec163df.js new file mode 100644 index 0000000..c6048ff --- /dev/null +++ b/assets/js/0582779b.aec163df.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[953],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>f});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var l=a.createContext({}),p=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},d=function(e){var n=p(e.components);return a.createElement(l.Provider,{value:n},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var 
t=e.components,i=e.mdxType,o=e.originalType,l=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=p(t),m=i,f=c["".concat(l,".").concat(m)]||c[m]||u[m]||o;return t?a.createElement(f,r(r({ref:n},d),{},{components:t})):a.createElement(f,r({ref:n},d))}));function f(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var o=t.length,r=new Array(o);r[0]=m;var s={};for(var l in n)hasOwnProperty.call(n,l)&&(s[l]=n[l]);s.originalType=e,s[c]="string"==typeof e?e:i,r[1]=s;for(var p=2;p{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>r,default:()=>u,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var a=t(7462),i=(t(7294),t(3905));const o={},r="Lifespan Events",s={unversionedId:"guides/Guide_05_Lifespan_Handler",id:"version-0.8.0/guides/Guide_05_Lifespan_Handler",title:"Lifespan Events",description:"Did you know that you can define some special code that runs before and",source:"@site/versioned_docs/version-0.8.0/guides/Guide_05_Lifespan_Handler.md",sourceDirName:"guides",slug:"/guides/Guide_05_Lifespan_Handler",permalink:"/docs/guides/Guide_05_Lifespan_Handler",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Batch producing",permalink:"/docs/guides/Guide_23_Batch_Producing"},next:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"}},l={},p=[{value:"Lifespan example - Iris prediction model",id:"lifespan-example---iris-prediction-model",level:2},{value:"Lifespan",id:"lifespan",level:3},{value:"Async context manager",id:"async-context-manager",level:3},{value:"App demo",id:"app-demo",level:2},{value:"FastKafka app",id:"fastkafka-app",level:3},{value:"Data modeling",id:"data-modeling",level:3},{value:"Consumers and producers",id:"consumers-and-producers",level:3},{value:"Final app",id:"final-app",level:3},{value:"Running the app",id:"running-the-app",level:3},{value:"Recap",id:"recap",level:2}],d={toc:p},c="wrapper";function 
u(e){let{components:n,...t}=e;return(0,i.kt)(c,(0,a.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"lifespan-events"},"Lifespan Events"),(0,i.kt)("p",null,"Did you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! \ud83d\ude80"),(0,i.kt)("p",null,"Lets break it down:"),(0,i.kt)("p",null,"You can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages."),(0,i.kt)("p",null,"Similarly, you can define logic (code) that should be executed when the\napplication is shutting down. This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up."),(0,i.kt)("p",null,"By executing code before consuming and after producing, you cover the\nentire lifecycle of your application \ud83c\udf89"),(0,i.kt)("p",null,"This is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!"),(0,i.kt)("p",null,"So lets give it a try and see how it can make your Kafka app even more\nawesome! \ud83d\udcaa"),(0,i.kt)("h2",{id:"lifespan-example---iris-prediction-model"},"Lifespan example - Iris prediction model"),(0,i.kt)("p",null,"Let\u2019s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don\u2019t want to load them for every\nmessage."),(0,i.kt)("p",null,"Here\u2019s where the lifecycle handler comes to the rescue! 
By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. In the upcoming sections, we\u2019ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication."),(0,i.kt)("h3",{id:"lifespan"},"Lifespan"),(0,i.kt)("p",null,"You can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager."),(0,i.kt)("p",null,"Let\u2019s start with an example and then see it in detail."),(0,i.kt)("p",null,"We create an async function lifespan() with yield like this:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \n')),(0,i.kt)("p",null,"The first thing to notice, is that we are defining an async function\nwith ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),". This is very similar to Dependencies with ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,i.kt)("p",null,"The first part of the function, before the ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),", will be executed\n",(0,i.kt)("strong",{parentName:"p"},"before")," the application starts. 
And the part after the ",(0,i.kt)("inlineCode",{parentName:"p"},"yield")," will\nbe executed ",(0,i.kt)("strong",{parentName:"p"},"after")," the application has finished."),(0,i.kt)("p",null,"This lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown."),(0,i.kt)("p",null,"The lifespan will be passed an KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup."),(0,i.kt)("p",null,"For demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called."),(0,i.kt)("h3",{id:"async-context-manager"},"Async context manager"),(0,i.kt)("p",null,"Context managers can be used in ",(0,i.kt)("inlineCode",{parentName:"p"},"with")," blocks, our lifespan, for example\ncould be used like this:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"ml_models = {}\nasync with lifespan(None):\n print(ml_models)\n")),(0,i.kt)("p",null,"When you create a context manager or an async context manager, what it\ndoes is that, before entering the ",(0,i.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute the code\nbefore the ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),", and after exiting the ",(0,i.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute\nthe code after the ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,i.kt)("p",null,"If you want to learn more about context managers and contextlib\ndecorators, please visit ",(0,i.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/contextlib.html"},"Python official\ndocs")),(0,i.kt)("h2",{id:"app-demo"},"App demo"),(0,i.kt)("h3",{id:"fastkafka-app"},"FastKafka app"),(0,i.kt)("p",null,"Lets now create our application using the created lifespan handler."),(0,i.kt)("p",null,"Notice how we passed our lifespan handler to the app when constructing\nit trough the 
",(0,i.kt)("inlineCode",{parentName:"p"},"lifespan")," argument."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "",\n "description": "local development kafka broker",\n "port": "",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n')),(0,i.kt)("h3",{id:"data-modeling"},"Data modeling"),(0,i.kt)("p",null,"Lets model the Iris data for our app:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("h3",{id:"consumers-and-producers"},"Consumers and producers"),(0,i.kt)("p",null,"Lets create a consumer and producer for our app that will generate\npredictions from input iris data."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = 
IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"final-app"},"Final app"),(0,i.kt)("p",null,"The final app looks like this:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \nkafka_brokers = {\n "localhost": {\n "url": "",\n "description": "local development kafka broker",\n "port": "",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def 
to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"running-the-app"},"Running the app"),(0,i.kt)("p",null,"Now we can run the app with your custom lifespan handler. Copy the code\nabove in lifespan_example.py and run it by running"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n")),(0,i.kt)("p",null,"When you run the app, you should see a simmilar output to the one below:"),(0,i.kt)("h2",{id:"recap"},"Recap"),(0,i.kt)("p",null,"In this guide we have defined a lifespan handler and passed to our\nFastKafka app."),(0,i.kt)("p",null,"Some important points are:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Lifespan handler is implemented as\n",(0,i.kt)("a",{parentName:"li",href:"https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager"},"AsyncContextManager")),(0,i.kt)("li",{parentName:"ol"},"Code ",(0,i.kt)("strong",{parentName:"li"},"before")," yield in lifespan will be executed ",(0,i.kt)("strong",{parentName:"li"},"before"),"\napplication ",(0,i.kt)("strong",{parentName:"li"},"startup")),(0,i.kt)("li",{parentName:"ol"},"Code ",(0,i.kt)("strong",{parentName:"li"},"after")," yield in lifespan will be executed ",(0,i.kt)("strong",{parentName:"li"},"after"),"\napplication ",(0,i.kt)("strong",{parentName:"li"},"shutdown")),(0,i.kt)("li",{parentName:"ol"},"You can pass your lifespan handler to FastKafka app on\ninitialisation by passing a ",(0,i.kt)("inlineCode",{parentName:"li"},"lifespan")," argument")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/060147ec.e48d0a0e.js b/assets/js/060147ec.e48d0a0e.js new file mode 100644 index 0000000..8ef811c --- /dev/null +++ b/assets/js/060147ec.e48d0a0e.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7602],{3905:(e,a,o)=>{o.d(a,{Zo:()=>c,kt:()=>f});var s=o(7294);function t(e,a,o){return a in e?Object.defineProperty(e,a,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[a]=o,e}function n(e,a){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);a&&(s=s.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),o.push.apply(o,s)}return o}function r(e){for(var a=1;a=0||(t[o]=e[o]);return t}(e,a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);for(s=0;s=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(t[o]=e[o])}return t}var p=s.createContext({}),k=function(e){var a=s.useContext(p),o=a;return e&&(o="function"==typeof e?e(a):r(r({},a),e)),o},c=function(e){var a=k(e.components);return s.createElement(p.Provider,{value:a},e.children)},_="mdxType",l={inlineCode:"code",wrapper:function(e){var a=e.children;return s.createElement(s.Fragment,{},a)}},m=s.forwardRef((function(e,a){var o=e.components,t=e.mdxType,n=e.originalType,p=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),_=k(o),m=t,f=_["".concat(p,".").concat(m)]||_[m]||l[m]||n;return o?s.createElement(f,r(r({ref:a},c),{},{components:o})):s.createElement(f,r({ref:a},c))}));function f(e,a){var o=arguments,t=a&&a.mdxType;if("string"==typeof e||t){var n=o.length,r=new Array(n);r[0]=m;var i={};for(var p in a)hasOwnProperty.call(a,p)&&(i[p]=a[p]);i.originalType=e,i[_]="string"==typeof e?e:t,r[1]=i;for(var k=2;k{o.r(a),o.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>l,frontMatter:()=>n,metadata:()=>i,toc:()=>k});var s=o(7462),t=(o(7294),o(3905));const n={},r="Using multiple Kafka clusters",i={unversionedId:"guides/Guide_24_Using_Multiple_Kafka_Clusters",id:"guides/Guide_24_Using_Multiple_Kafka_Clusters",title:"Using multiple Kafka clusters",description:"Ready to take your FastKafka app to the next level? 
This guide shows you",source:"@site/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",sourceDirName:"guides",slug:"/guides/Guide_24_Using_Multiple_Kafka_Clusters",permalink:"/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},next:{title:"Using Tester to test FastKafka",permalink:"/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka"}},p={},k=[{value:"Test message",id:"test-message",level:3},{value:"Defining multiple broker configurations",id:"defining-multiple-broker-configurations",level:2},{value:"How it works",id:"how-it-works",level:4},{value:"Testing the application",id:"testing-the-application",level:2},{value:"Running the application",id:"running-the-application",level:2},{value:"Application documentation",id:"application-documentation",level:2},{value:"Examples on how to use multiple broker configurations",id:"examples-on-how-to-use-multiple-broker-configurations",level:2},{value:"Example #1",id:"example-1",level:3},{value:"Testing",id:"testing",level:4},{value:"Example #2",id:"example-2",level:3},{value:"Testing",id:"testing-1",level:4},{value:"Example #3",id:"example-3",level:3},{value:"Testing",id:"testing-2",level:4}],c={toc:k},_="wrapper";function l(e){let{components:a,...o}=e;return(0,t.kt)(_,(0,s.Z)({},c,o,{components:a,mdxType:"MDXLayout"}),(0,t.kt)("h1",{id:"using-multiple-kafka-clusters"},"Using multiple Kafka clusters"),(0,t.kt)("p",null,"Ready to take your FastKafka app to the next level? This guide shows you\nhow to connect to multiple Kafka clusters effortlessly. Consolidate\ntopics and produce messages across clusters like a pro. Unleash the full\npotential of your Kafka-powered app with FastKafka. 
Let\u2019s dive in and\nelevate your application\u2019s capabilities!"),(0,t.kt)("h3",{id:"test-message"},"Test message"),(0,t.kt)("p",null,"To showcase the functionalities of FastKafka and illustrate the concepts\ndiscussed, we can use a simple test message called ",(0,t.kt)("inlineCode",{parentName:"p"},"TestMsg"),". Here\u2019s the\ndefinition of the ",(0,t.kt)("inlineCode",{parentName:"p"},"TestMsg")," class:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},"class TestMsg(BaseModel):\n msg: str = Field(...)\n")),(0,t.kt)("h2",{id:"defining-multiple-broker-configurations"},"Defining multiple broker configurations"),(0,t.kt)("p",null,"When building a FastKafka application, you may need to consume messages\nfrom multiple Kafka clusters, each with its own set of broker\nconfigurations. FastKafka provides the flexibility to define different\nbroker clusters using the brokers argument in the consumes decorator.\nLet\u2019s explore an example code snippet"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\nkafka_brokers_1 = dict(\n development=dict(url="dev.server_1", port=9092),\n production=dict(url="prod.server_1", port=9092),\n)\nkafka_brokers_2 = dict(\n development=dict(url="dev.server_2", port=9092),\n production=dict(url="prod.server_1", port=9092),\n)\n\napp = FastKafka(kafka_brokers=kafka_brokers_1, bootstrap_servers_id="development")\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n print(f"Received on s1: {msg=}")\n await to_predictions_1(msg)\n\n\n@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n print(f"Received on s2: {msg=}")\n await to_predictions_2(msg)\n\n\n@app.produces(topic="predictions")\nasync def to_predictions_1(msg: 
TestMsg) -> TestMsg:\n return msg\n\n\n@app.produces(topic="predictions", brokers=kafka_brokers_2)\nasync def to_predictions_2(msg: TestMsg) -> TestMsg:\n return msg\n')),(0,t.kt)("p",null,"In this example, the application has two consumes endpoints, both of\nwhich will consume events from ",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals")," topic.\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_1")," will consume events from ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"\nconfiguration and ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2")," will consume events from\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," configuration. When producing, ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1")," will\nproduce to ",(0,t.kt)("inlineCode",{parentName:"p"},"predictions")," topic on ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," cluster and\n",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2")," will produce to ",(0,t.kt)("inlineCode",{parentName:"p"},"predictions")," topic on\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," cluster."),(0,t.kt)("h4",{id:"how-it-works"},"How it works"),(0,t.kt)("p",null,"The ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," configuration represents the primary cluster,\nwhile ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," serves as an alternative cluster specified in\nthe decorator."),(0,t.kt)("p",null,"Using the FastKafka class, the app object is initialized with the\nprimary broker configuration (",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"). 
By default, the\n",(0,t.kt)("inlineCode",{parentName:"p"},"@app.consumes")," decorator without the brokers argument consumes messages\nfrom the ",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals")," topic on ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"."),(0,t.kt)("p",null,"To consume messages from a different cluster, the ",(0,t.kt)("inlineCode",{parentName:"p"},"@app.consumes"),"\ndecorator includes the ",(0,t.kt)("inlineCode",{parentName:"p"},"brokers")," argument. This allows explicit\nspecification of the broker cluster in the ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2"),"\nfunction, enabling consumption from the same topic but using the\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," configuration."),(0,t.kt)("p",null,"The brokers argument can also be used in the @app.produces decorator to\ndefine multiple broker clusters for message production."),(0,t.kt)("p",null,"It\u2019s important to ensure that all broker configurations have the same\nrequired settings as the primary cluster to ensure consistent behavior."),(0,t.kt)("h2",{id:"testing-the-application"},"Testing the application"),(0,t.kt)("p",null,"To test our FastKafka \u2018mirroring\u2019 application, we can use our testing\nframework. 
Lets take a look how it\u2019s done:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n # Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from\n await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal_s1"))\n # Assert on_preprocessed_signals_1 consumed sent message\n await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(\n TestMsg(msg="signal_s1"), timeout=5\n )\n # Assert app has produced a prediction\n await tester.mirrors[app.to_predictions_1].assert_called_with(\n TestMsg(msg="signal_s1"), timeout=5\n )\n\n # Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from\n await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal_s2"))\n # Assert on_preprocessed_signals_2 consumed sent message\n await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(\n TestMsg(msg="signal_s2"), timeout=5\n )\n # Assert app has produced a prediction\n await tester.mirrors[app.to_predictions_2].assert_called_with(\n TestMsg(msg="signal_s2"), timeout=5\n )\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-06-23 12:15:51.156 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-06-23 12:15:51.157 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-06-23 12:15:51.157 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n23-06-23 12:15:51.158 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:15:51.158 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n23-06-23 12:15:51.159 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() 
called()\n23-06-23 12:15:51.178 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n23-06-23 12:15:51.178 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:15:51.179 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n23-06-23 12:15:51.180 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n23-06-23 12:15:51.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:15:51.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:15:51.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:15:51.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:15:51.187 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\n23-06-23 12:15:51.187 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 
12:15:51.188 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:15:51.188 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:15:51.189 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n23-06-23 12:15:51.190 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:15:51.191 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\n23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:15:51.193 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:15:51.194 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:15:51.194 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nReceived on s1: msg=TestMsg(msg='signal_s1')\nReceived on s2: msg=TestMsg(msg='signal_s2')\n23-06-23 12:15:56.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:15:56.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:15:56.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:15:56.183 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:15:56.184 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:15:56.184 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:15:56.185 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:15:56.186 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:15:56.188 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"The usage of the ",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors")," dictionary allows specifying the\ndesired topic/broker combination for sending the test messages,\nespecially when working with multiple Kafka clusters. This ensures that\nthe data is sent to the appropriate topic/broker based on the consuming\nfunction, and consumed from appropriate topic/broker based on the\nproducing function."),(0,t.kt)("h2",{id:"running-the-application"},"Running the application"),(0,t.kt)("p",null,"You can run your application using ",(0,t.kt)("inlineCode",{parentName:"p"},"fastkafka run")," CLI command in the\nsame way that you would run a single cluster app."),(0,t.kt)("p",null,"To start your app, copy the code above in multi_cluster_example.py and\nrun it by running:"),(0,t.kt)("p",null,"Now we can run the app. 
Copy the code above in multi_cluster_example.py,\nadjust your server configurations, and run it by running"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app\n")),(0,t.kt)("p",null,"In your app logs, you should see your app starting up and your two\nconsumer functions connecting to different kafka clusters."),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24092'}\n[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24093'}\n[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.subscription_state: Updating 
subscribed topics to: frozenset({'preprocessed_signals'})\n[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \nStarting process cleanup, this may take a few seconds...\n23-06-23 12:16:18.294 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 182747...\n[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:19.471 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 182747 terminated.\n")),(0,t.kt)("h2",{id:"application-documentation"},"Application documentation"),(0,t.kt)("p",null,"At the moment the documentation for multicluster app is not yet\nimplemented, but it is under development and you can expecti it soon!"),(0,t.kt)("h2",{id:"examples-on-how-to-use-multiple-broker-configurations"},"Examples on how to use multiple broker configurations"),(0,t.kt)("h3",{id:"example-1"},"Example ","#","1"),(0,t.kt)("p",null,"In this section, we\u2019ll explore how you can effectively forward topics\nbetween different Kafka clusters, enabling seamless data 
synchronization\nfor your applications."),(0,t.kt)("p",null,"Imagine having two Kafka clusters, namely ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2"),", each hosting its own set of topics and messages. Now,\nif you want to forward a specific topic (in this case:\n",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals"),") from kafka_brokers_1 to kafka_brokers_2,\nFastKafka provides an elegant solution."),(0,t.kt)("p",null,"Let\u2019s examine the code snippet that configures our application for topic\nforwarding:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_original(msg: TestMsg):\n await to_preprocessed_signals_forward(msg)\n\n\n@app.produces(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:\n return data\n')),(0,t.kt)("p",null,"Here\u2019s how it works: our FastKafka application is configured to consume\nmessages from ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," and process them in the\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_original")," function. We want to forward these\nmessages to ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2"),". 
To achieve this, we define the\n",(0,t.kt)("inlineCode",{parentName:"p"},"to_preprocessed_signals_forward")," function as a producer, seamlessly\nproducing the processed messages to the preprocessed_signals topic\nwithin the ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," cluster."),(0,t.kt)("h4",{id:"testing"},"Testing"),(0,t.kt)("p",null,"To test our FastKafka forwarding application, we can use our testing\nframework. Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg="signal"))\n await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-06-23 12:16:31.689 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-06-23 12:16:31.690 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-06-23 12:16:31.691 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-06-23 12:16:31.691 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:31.701 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:31.702 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:31.702 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:31.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 
'server_1:9092'}\n23-06-23 12:16:31.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:31.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:31.707 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-06-23 12:16:31.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:31.708 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:31.708 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:31.709 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:31.709 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:35.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:35.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:35.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:35.704 [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaProducer patched stop() called\n23-06-23 12:16:35.705 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:35.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:35.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"With the help of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," object, we can simulate and verify the\nbehavior of our FastKafka application. Here\u2019s how it works:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We create an instance of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," by passing in our ",(0,t.kt)("em",{parentName:"p"},"app"),"\nobject, which represents our FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the ",(0,t.kt)("strong",{parentName:"p"},"tester.mirrors")," dictionary, we can send a message to a\nspecific Kafka broker and topic combination. 
In this case, we use\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_original]"),' to send a\nTestMsg message with the content \u201csignal" to the appropriate Kafka\nbroker and topic.')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the message, we can perform assertions on the mirrored\nfunction using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)"),".\nThis assertion ensures that the mirrored function has been called\nwithin a specified timeout period (in this case, 5 seconds)."))),(0,t.kt)("h3",{id:"example-2"},"Example ","#","2"),(0,t.kt)("p",null,"In this section, we\u2019ll explore how you can effortlessly consume data\nfrom multiple sources, process it, and aggregate the results into a\nsingle topic on a specific cluster."),(0,t.kt)("p",null,"Imagine you have two Kafka clusters: ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2"),", each hosting its own set of topics and messages.\nNow, what if you want to consume data from both clusters, perform some\nprocessing, and produce the results to a single topic on\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1"),"? 
FastKafka has got you covered!"),(0,t.kt)("p",null,"Let\u2019s take a look at the code snippet that configures our application\nfor aggregating multiple clusters:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n print(f"Default: {msg=}")\n await to_predictions(msg)\n\n\n@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n print(f"Specified: {msg=}")\n await to_predictions(msg)\n\n\n@app.produces(topic="predictions")\nasync def to_predictions(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction: {prediction}")\n return [prediction]\n')),(0,t.kt)("p",null,'Here\u2019s the idea: our FastKafka application is set to consume messages\nfrom the topic \u201cpreprocessed_signals" on ',(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," cluster, as\nwell as from the same topic on ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2")," cluster. We have two\nconsuming functions, ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_1")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2"),", that handle the messages from their\nrespective clusters. These functions perform any required processing, in\nthis case, just calling the to_predictions function."),(0,t.kt)("p",null,'The exciting part is that the to_predictions function acts as a\nproducer, sending the processed results to the \u201cpredictions" topic on\n',(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1 cluster"),". 
By doing so, we effectively aggregate the\ndata from multiple sources into a single topic on a specific cluster."),(0,t.kt)("p",null,"This approach enables you to consume data from multiple Kafka clusters,\nprocess it, and produce the aggregated results to a designated topic.\nWhether you\u2019re generating predictions, performing aggregations, or any\nother form of data processing, FastKafka empowers you to harness the\nfull potential of multiple clusters."),(0,t.kt)("h4",{id:"testing-1"},"Testing"),(0,t.kt)("p",null,"Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal"))\n await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal"))\n await tester.on_predictions.assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-06-23 12:16:41.222 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-06-23 12:16:41.223 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-06-23 12:16:41.224 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:41.224 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:41.239 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:41.239 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:41.240 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-06-23 12:16:41.240 [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaProducer patched start() called()\n23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-06-23 12:16:41.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:41.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-06-23 12:16:41.245 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:41.247 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:41.248 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-06-23 12:16:41.248 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nDefault: msg=TestMsg(msg='signal')\nSending prediction: msg='signal'\nSpecified: msg=TestMsg(msg='signal')\nSending prediction: msg='signal'\n23-06-23 12:16:45.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:45.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:45.244 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:45.245 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:45.246 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:45.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:45.247 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"Here\u2019s how the code above works:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Within an ",(0,t.kt)("inlineCode",{parentName:"p"},"async with")," block, create an instance of the Tester by\npassing in your app object, representing your FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the tester.mirrors dictionary, you can send messages to\nspecific Kafka broker and topic combinations. In this case, we use\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_1]")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_2]"),' to send TestMsg\nmessages with the content \u201csignal" to the corresponding Kafka broker\nand topic combinations.')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the messages, you can perform assertions on the\n",(0,t.kt)("strong",{parentName:"p"},"on_predictions")," function using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.on_predictions.assert_called(timeout=5)"),". This assertion\nensures that the on_predictions function has been called within a\nspecified timeout period (in this case, 5 seconds)."))),(0,t.kt)("h3",{id:"example-3"},"Example ","#","3"),(0,t.kt)("p",null,"In some scenarios, you may need to produce messages to multiple Kafka\nclusters simultaneously. 
FastKafka simplifies this process by allowing\nyou to configure your application to produce messages to multiple\nclusters effortlessly. Let\u2019s explore how you can achieve this:"),(0,t.kt)("p",null,"Consider the following code snippet that demonstrates producing messages\nto multiple clusters:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals(msg: TestMsg):\n print(f"{msg=}")\n await to_predictions_1(TestMsg(msg="prediction"))\n await to_predictions_2(TestMsg(msg="prediction"))\n\n\n@app.produces(topic="predictions")\nasync def to_predictions_1(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction to s1: {prediction}")\n return [prediction]\n\n\n@app.produces(topic="predictions", brokers=kafka_brokers_2)\nasync def to_predictions_2(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction to s2: {prediction}")\n return [prediction]\n')),(0,t.kt)("p",null,"Here\u2019s what you need to know about producing to multiple clusters:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We define two Kafka broker configurations: ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2"),", representing different clusters with their\nrespective connection details.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We create an instance of the FastKafka application, specifying\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," as the primary cluster for producing 
messages.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"The ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals"),' function serves as a consumer,\nhandling incoming messages from the \u201cpreprocessed_signals" topic.\nWithin this function, we invoke two producer functions:\n',(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1")," and ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2"),".")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"The ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1"),' function sends predictions to the\n\u201cpredictions" topic on ',(0,t.kt)("em",{parentName:"p"},"kafka_brokers_1")," cluster.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Additionally, the ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2"),' function sends the same\npredictions to the \u201cpredictions" topic on ',(0,t.kt)("em",{parentName:"p"},"kafka_brokers_2")," cluster.\nThis allows for producing the same data to multiple clusters\nsimultaneously."))),(0,t.kt)("p",null,"By utilizing this approach, you can seamlessly produce messages to\nmultiple Kafka clusters, enabling you to distribute data across\ndifferent environments or leverage the strengths of various clusters."),(0,t.kt)("p",null,"Feel free to customize the producer functions as per your requirements,\nperforming any necessary data transformations or enrichment before\nsending the predictions."),(0,t.kt)("p",null,"With FastKafka, producing to multiple clusters becomes a breeze,\nempowering you to harness the capabilities of multiple environments\neffortlessly."),(0,t.kt)("h4",{id:"testing-2"},"Testing"),(0,t.kt)("p",null,"Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.to_preprocessed_signals(TestMsg(msg="signal"))\n await 
tester.mirrors[to_predictions_1].assert_called(timeout=5)\n await tester.mirrors[to_predictions_2].assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-06-23 12:16:49.903 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-06-23 12:16:49.904 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-06-23 12:16:49.904 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:49.905 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:49.905 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-06-23 12:16:49.906 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:49.921 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:49.921 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:49.921 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:49.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-06-23 12:16:49.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:49.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:49.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:49.924 [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:49.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-06-23 12:16:49.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:49.926 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-06-23 12:16:49.928 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:16:49.929 
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nmsg=TestMsg(msg='signal')\nSending prediction to s1: msg='prediction'\nSending prediction to s2: msg='prediction'\n23-06-23 12:16:53.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:53.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:53.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:53.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:53.924 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:53.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"Here\u2019s how you can perform the necessary tests:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Within an async with block, create an instance of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," by\npassing in your app object, representing your FastKafka 
application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the ",(0,t.kt)("inlineCode",{parentName:"p"},"tester.to_preprocessed_signals"),' method, you can send a\nTestMsg message with the content \u201csignal".')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the message, you can perform assertions on the\nto_predictions_1 and to_predictions_2 functions using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[to_predictions_1].assert_called(timeout=5)")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[to_predictions_2].assert_called(timeout=5)"),". These\nassertions ensure that the respective producer functions have\nproduced data to their respective topic/broker combinations."))),(0,t.kt)("p",null,"By employing this testing approach, you can verify that the producing\nfunctions correctly send messages to their respective clusters. The\ntesting framework provided by FastKafka enables you to ensure the\naccuracy and reliability of your application\u2019s producing logic."))}l.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/065bbf18.851ffe9a.js b/assets/js/065bbf18.851ffe9a.js new file mode 100644 index 0000000..7e38e4f --- /dev/null +++ b/assets/js/065bbf18.851ffe9a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1244],{3905:(e,a,t)=>{t.d(a,{Zo:()=>m,kt:()=>f});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function r(e){for(var a=1;a=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var 
s=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),k=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof e?e(a):r(r({},a),e)),t},m=function(e){var a=k(e.components);return n.createElement(p.Provider,{value:a},e.children)},_="mdxType",c={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},l=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,s=e.originalType,p=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),_=k(t),l=o,f=_["".concat(p,".").concat(l)]||_[l]||c[l]||s;return t?n.createElement(f,r(r({ref:a},m),{},{components:t})):n.createElement(f,r({ref:a},m))}));function f(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=l;var i={};for(var p in a)hasOwnProperty.call(a,p)&&(i[p]=a[p]);i.originalType=e,i[_]="string"==typeof e?e:o,r[1]=i;for(var k=2;k{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>c,frontMatter:()=>s,metadata:()=>i,toc:()=>k});var n=t(7462),o=(t(7294),t(3905));const s={},r="Using Tester to test FastKafka",i={unversionedId:"guides/Guide_33_Using_Tester_class_to_test_fastkafka",id:"guides/Guide_33_Using_Tester_class_to_test_fastkafka",title:"Using Tester to test FastKafka",description:"In order to speed up development and make testing easier, we have",source:"@site/docs/guides/Guide_33_Using_Tester_class_to_test_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_33_Using_Tester_class_to_test_fastkafka",permalink:"/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using multiple Kafka clusters",permalink:"/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters"},next:{title:"Using Redpanda to test FastKafka",permalink:"/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka"}},p={},k=[{value:"Basic 
example",id:"basic-example",level:2},{value:"Testing the application",id:"testing-the-application",level:3},{value:"Final script",id:"final-script",level:3},{value:"Using external brokers",id:"using-external-brokers",level:2},{value:"Example: New Employee app",id:"example-new-employee-app",level:2},{value:"Testing the application",id:"testing-the-application-1",level:3},{value:"Final script",id:"final-script-1",level:3}],m={toc:k},_="wrapper";function c(e){let{components:a,...t}=e;return(0,o.kt)(_,(0,n.Z)({},m,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"using-tester-to-test-fastkafka"},"Using Tester to test FastKafka"),(0,o.kt)("p",null,"In order to speed up development and make testing easier, we have\nimplemented the ",(0,o.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/Tester/"},"Tester")," class."),(0,o.kt)("p",null,"The ",(0,o.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/Tester/"},"Tester")," instance starts\nin-memory implementation of Kafka broker i.e.\xa0there is no need for\nstarting localhost Kafka service for testing FastKafka apps. The\n",(0,o.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/Tester/"},"Tester")," will redirect ",(0,o.kt)("inlineCode",{parentName:"p"},"consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"produces")," decorated functions to the in-memory Kafka broker so that\nyou can quickly test FasKafka apps without the need of a running Kafka\nbroker and all its dependencies. 
Also, for each FastKafka ",(0,o.kt)("inlineCode",{parentName:"p"},"consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"produces")," function, ",(0,o.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/Tester/"},"Tester")," will\ncreate it\u2019s mirrored fuction i.e.\xa0if the ",(0,o.kt)("inlineCode",{parentName:"p"},"consumes")," function is\nimplemented, the ",(0,o.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/Tester/"},"Tester")," will\ncreate the ",(0,o.kt)("inlineCode",{parentName:"p"},"produces")," function (and the other way - if the ",(0,o.kt)("inlineCode",{parentName:"p"},"produces"),"\nfunction is implemented, ",(0,o.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/Tester/"},"Tester"),"\nwill create ",(0,o.kt)("inlineCode",{parentName:"p"},"consumes")," function)."),(0,o.kt)("h2",{id:"basic-example"},"Basic example"),(0,o.kt)("p",null,"To showcase the functionalities of FastKafka and illustrate the concepts\ndiscussed, we can use a simple test message called ",(0,o.kt)("inlineCode",{parentName:"p"},"TestMsg"),". Here\u2019s the\ndefinition of the ",(0,o.kt)("inlineCode",{parentName:"p"},"TestMsg")," class:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'class TestMsg(BaseModel):\n msg: str = Field(...)\n\n\ntest_msg = TestMsg(msg="signal")\n')),(0,o.kt)("p",null,"In this example we have implemented\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp with one ",(0,o.kt)("inlineCode",{parentName:"p"},"consumes")," and one ",(0,o.kt)("inlineCode",{parentName:"p"},"produces")," function. 
",(0,o.kt)("inlineCode",{parentName:"p"},"on_input")," function\nconsumes messages from the ",(0,o.kt)("inlineCode",{parentName:"p"},"input")," topic and ",(0,o.kt)("inlineCode",{parentName:"p"},"to_output")," function\nproduces messages to the ",(0,o.kt)("inlineCode",{parentName:"p"},"output")," topic."),(0,o.kt)("p",null,(0,o.kt)("strong",{parentName:"p"},"Note"),": it is necessary to define parameter and return types in the\nproduces and consumes functions"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\napp = FastKafka()\n\n\n@app.consumes()\nasync def on_input(msg: TestMsg):\n await to_output(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@app.produces()\nasync def to_output(msg: TestMsg) -> TestMsg:\n return msg\n')),(0,o.kt)("h3",{id:"testing-the-application"},"Testing the application"),(0,o.kt)("p",null,"In this example ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," has imlemented ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"to_output"),"\nfunctions. 
We can now use ",(0,o.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/Tester/"},"Tester"),"\nto create their mirrored functions: ",(0,o.kt)("inlineCode",{parentName:"p"},"to_input")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"on_output"),"."),(0,o.kt)("p",null,"Testing process for this example could look like this:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("inlineCode",{parentName:"p"},"tester")," produces the message to the ",(0,o.kt)("inlineCode",{parentName:"p"},"input")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Assert that the ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," consumed the message by calling ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input"),"\nwith the accurate argument")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Within ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input")," function, ",(0,o.kt)("inlineCode",{parentName:"p"},"to_output")," function is called - and\nmessage is produced to the ",(0,o.kt)("inlineCode",{parentName:"p"},"output")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Assert that the ",(0,o.kt)("inlineCode",{parentName:"p"},"tester")," consumed the message by calling ",(0,o.kt)("inlineCode",{parentName:"p"},"on_output"),"\nwith the accurate argument"))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'async with Tester(app).using_inmemory_broker() as tester:\n input_msg = TestMsg(msg="Mickey")\n\n # tester produces message to the input topic\n await tester.to_input(input_msg)\n # previous line is equal to\n # await tester.mirrors[app.on_input](input_msg)\n\n # assert that app consumed from the input topic and it was called with the accurate argument\n await app.awaited_mocks.on_input.assert_called_with(\n TestMsg(msg="Mickey"), timeout=5\n )\n # assert that tester consumed from the output topic and it was called with the accurate argument\n 
await tester.on_output.assert_called_with(TestMsg(msg="Hello Mickey"), timeout=5)\nprint("ok")\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-07-31 10:38:30.810 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-07-31 10:38:30.811 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-07-31 10:38:30.812 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n23-07-31 10:38:30.812 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:30.826 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n23-07-31 10:38:30.827 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:30.827 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 10:38:30.828 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\n23-07-31 10:38:30.828 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:30.829 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:30.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:30.830 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input']\n23-07-31 10:38:30.830 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:30.835 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 
10:38:30.835 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\n23-07-31 10:38:30.836 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:30.836 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:30.836 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:30.837 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output']\n23-07-31 10:38:30.837 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:34.828 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-07-31 10:38:34.828 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:34.829 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 10:38:34.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:34.830 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-07-31 10:38:34.831 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:34.831 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 10:38:34.832 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:34.832 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\nok\n")),(0,o.kt)("h3",{id:"final-script"},"Final script"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'import asyncio\nfrom fastkafka._application.app 
import FastKafka\nfrom fastkafka._application.tester import Tester\nfrom pydantic import BaseModel, Field\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\napp = FastKafka()\n\n\n@app.consumes()\nasync def on_input(msg: TestMsg):\n await to_output(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@app.produces()\nasync def to_output(msg: TestMsg) -> TestMsg:\n return msg\n\n\nasync def async_tests():\n async with Tester(app).using_inmemory_broker() as tester:\n input_msg = TestMsg(msg="Mickey")\n\n # tester produces message to the input topic\n await tester.to_input(input_msg)\n\n # assert that app consumed from the input topic and it was called with the accurate argument\n await app.awaited_mocks.on_input.assert_called_with(\n TestMsg(msg="Mickey"), timeout=5\n )\n # assert that tester consumed from the output topic and it was called with the accurate argument\n await tester.awaited_mocks.on_output.assert_called_with(\n TestMsg(msg="Hello Mickey"), timeout=5\n )\n print("ok")\n\n\nif __name__ == "__main__":\n loop = asyncio.get_event_loop()\n loop.run_until_complete(async_tests())\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-07-31 10:38:34.855 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-07-31 10:38:34.856 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-07-31 10:38:34.856 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n23-07-31 10:38:34.857 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:34.871 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n23-07-31 10:38:34.872 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:34.872 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 10:38:34.873 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\n23-07-31 10:38:34.874 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:34.875 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:34.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:34.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input']\n23-07-31 10:38:34.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:34.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 10:38:34.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}\n23-07-31 10:38:34.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:34.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:34.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:34.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output']\n23-07-31 10:38:34.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:38.873 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-07-31 10:38:38.873 
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:38.874 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 10:38:38.874 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:38.875 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-07-31 10:38:38.876 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:38.877 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 10:38:38.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:38.878 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\nok\n")),(0,o.kt)("h2",{id:"using-external-brokers"},"Using external brokers"),(0,o.kt)("p",null,"If you have already running brokers e.g.\xa0",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", you can use\n",(0,o.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/Tester/"},"Tester")," method\n",(0,o.kt)("inlineCode",{parentName:"p"},"using_external_broker")," to set brokers which will be used in tests."),(0,o.kt)("p",null,"The same example as previous but with external ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application_test.py" file\n\nimport asyncio\nfrom fastkafka._application.app import FastKafka\nfrom fastkafka._application.tester import Tester\nfrom pydantic import BaseModel, Field\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": 
"kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\napp = FastKafka(\n title="Demo Kafka app",\n kafka_brokers=kafka_brokers,\n)\n\n@app.consumes()\nasync def on_input(msg: TestMsg):\n await to_output(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@app.produces()\nasync def to_output(msg: TestMsg) -> TestMsg:\n return msg\n\n\nasync def async_tests():\n async with Tester(app).using_external_broker(bootstrap_servers_id="production") as tester:\n input_msg = TestMsg(msg="Mickey")\n\n # tester produces message to the input topic\n await tester.to_input(input_msg)\n\n # assert that app consumed from the input topic and it was called with the accurate argument\n await app.awaited_mocks.on_input.assert_called_with(\n TestMsg(msg="Mickey"), timeout=5\n )\n # assert that tester consumed from the output topic and it was called with the accurate argument\n await tester.awaited_mocks.on_output.assert_called_with(\n TestMsg(msg="Hello Mickey"), timeout=5\n )\n print("ok")\n\n\nif __name__ == "__main__":\n loop = asyncio.get_event_loop()\n loop.run_until_complete(async_tests())\n')),(0,o.kt)("h2",{id:"example-new-employee-app"},"Example: New Employee app"),(0,o.kt)("p",null,"In this example, our ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," has one consumes and two produces functions."),(0,o.kt)("p",null,"Every time a company hires an ",(0,o.kt)("inlineCode",{parentName:"p"},"Employee"),", some employee data is sent to\nthe ",(0,o.kt)("inlineCode",{parentName:"p"},"new_employee")," topic."),(0,o.kt)("p",null,"That\u2019s when our application comes into play! The app consumes this data\nby calling ",(0,o.kt)("inlineCode",{parentName:"p"},"on_new_employee"),". 
Within this function, ",(0,o.kt)("inlineCode",{parentName:"p"},"to_employee_email"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"to_welcome_message")," functions are called - and messages are\nproduced to the ",(0,o.kt)("inlineCode",{parentName:"p"},"employee_email")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"welcome_message")," topic."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'class Employee(BaseModel):\n name: str\n surname: str\n email: Optional[str] = None\n\n\nclass EmaiMessage(BaseModel):\n sender: str = "info@gmail.com"\n receiver: str\n subject: str\n message: str\n\nkafka_brokers = dict(localhost=[dict(url="server_1", port=9092)], production=[dict(url="production_server_1", port=9092)])\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n\n@app.consumes()\nasync def on_new_employee(msg: Employee):\n employee = await to_employee_email(msg)\n await to_welcome_message(employee)\n\n\n@app.produces()\nasync def to_employee_email(employee: Employee) -> Employee:\n # generate new email\n employee.email = employee.name + "." + employee.surname + "@gmail.com"\n return employee\n\n\n@app.produces()\nasync def to_welcome_message(employee: Employee) -> EmaiMessage:\n message = f"Dear {employee.name},\\nWelcome to the company"\n return EmaiMessage(receiver=employee.email, subject="Welcome", message=message)\n')),(0,o.kt)("h3",{id:"testing-the-application-1"},"Testing the application"),(0,o.kt)("p",null,"In this example ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," has imlemented ",(0,o.kt)("inlineCode",{parentName:"p"},"on_new_employee"),",\n",(0,o.kt)("inlineCode",{parentName:"p"},"to_employee_email")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"to_welcome_message")," functions. 
We can now use\n",(0,o.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/Tester/"},"Tester")," to create their mirrored\nfunctions: ",(0,o.kt)("inlineCode",{parentName:"p"},"to_new_employee"),", ",(0,o.kt)("inlineCode",{parentName:"p"},"on_employee_email")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"on_welcome_message"),"."),(0,o.kt)("p",null,"Testing process:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("inlineCode",{parentName:"p"},"tester")," produces message to the ",(0,o.kt)("inlineCode",{parentName:"p"},"new_employee")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Assert that the ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," consumed the message from the ",(0,o.kt)("inlineCode",{parentName:"p"},"new_employee"),"\ntopic with the accurate argument")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Within ",(0,o.kt)("inlineCode",{parentName:"p"},"on_new_employee")," function, ",(0,o.kt)("inlineCode",{parentName:"p"},"to_employee_email")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"to_welcome_message")," functions are called - and messages are\nproduced to the ",(0,o.kt)("inlineCode",{parentName:"p"},"employee_email")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"welcome_message")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Assert that the ",(0,o.kt)("inlineCode",{parentName:"p"},"tester")," consumed the message by calling\n",(0,o.kt)("inlineCode",{parentName:"p"},"on_employee_email"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Assert that the ",(0,o.kt)("inlineCode",{parentName:"p"},"tester")," consumed the message by calling\n",(0,o.kt)("inlineCode",{parentName:"p"},"on_welcome_message")))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'assert app._kafka_config["bootstrap_servers_id"] == "localhost"\n\nasync with 
Tester(app).using_inmemory_broker(bootstrap_servers_id="production") as tester:\n assert app._kafka_config["bootstrap_servers_id"] == "production"\n assert tester._kafka_config["bootstrap_servers_id"] == "production"\n\n # produce the message to new_employee topic\n await tester.to_new_employee(Employee(name="Mickey", surname="Mouse"))\n # previous line is equal to:\n # await tester.mirrors[app.on_new_employee](Employee(name="Mickey", surname="Mouse"))\n\n # Assert app consumed the message\n await app.awaited_mocks.on_new_employee.assert_called_with(\n Employee(name="Mickey", surname="Mouse"), timeout=5\n )\n\n # If the the previous assert is true (on_new_employee was called),\n # to_employee_email and to_welcome_message were called inside on_new_employee function\n\n # Now we can check if this two messages were consumed\n await tester.awaited_mocks.on_employee_email.assert_called(timeout=5)\n await tester.awaited_mocks.on_welcome_message.assert_called(timeout=5)\n\nassert app._kafka_config["bootstrap_servers_id"] == "localhost"\n\nprint("ok")\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-07-31 10:38:40.069 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-07-31 10:38:40.070 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-07-31 10:38:40.070 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\n23-07-31 10:38:40.071 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:40.071 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\n23-07-31 10:38:40.072 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:40.091 [INFO] fastkafka._application.app: _create_producer() : 
created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\n23-07-31 10:38:40.091 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:40.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 10:38:40.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\n23-07-31 10:38:40.093 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:40.093 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:40.094 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:40.094 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['new_employee']\n23-07-31 10:38:40.095 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:40.096 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 10:38:40.097 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\n23-07-31 10:38:40.098 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:40.099 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:40.099 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:40.100 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: 
['employee_email']\n23-07-31 10:38:40.100 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:40.101 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 10:38:40.101 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\n23-07-31 10:38:40.102 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:40.103 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:40.103 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:40.103 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['welcome_message']\n23-07-31 10:38:40.104 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:44.092 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-07-31 10:38:44.093 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:44.093 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 10:38:44.094 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-07-31 10:38:44.094 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:44.095 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 10:38:44.095 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:44.096 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() 
called\n23-07-31 10:38:44.096 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:44.096 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 10:38:44.097 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:44.097 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:44.097 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\nok\n")),(0,o.kt)("h3",{id:"final-script-1"},"Final script"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'import asyncio\nfrom fastkafka._application.app import FastKafka\nfrom fastkafka._application.tester import Tester\nfrom pydantic import BaseModel, Field\nfrom typing import Optional\n\n\nclass Employee(BaseModel):\n name: str\n surname: str\n email: Optional[str] = None\n\n\nclass EmaiMessage(BaseModel):\n sender: str = "info@gmail.com"\n receiver: str\n subject: str\n message: str\n\n\nkafka_brokers = dict(localhost=[dict(url="server_1", port=9092)], production=[dict(url="production_server_1", port=9092)])\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n\n@app.consumes()\nasync def on_new_employee(msg: Employee):\n employee = await to_employee_email(msg)\n await to_welcome_message(employee)\n\n\n@app.produces()\nasync def to_employee_email(employee: Employee) -> Employee:\n # generate new email\n employee.email = employee.name + "." 
+ employee.surname + "@gmail.com"\n return employee\n\n\n@app.produces()\nasync def to_welcome_message(employee: Employee) -> EmaiMessage:\n message = f"Dear {employee.name},\\nWelcome to the company"\n return EmaiMessage(receiver=employee.email, subject="Welcome", message=message)\n\n\nasync def async_tests():\n assert app._kafka_config["bootstrap_servers_id"] == "localhost"\n \n async with Tester(app).using_inmemory_broker(bootstrap_servers_id="production") as tester:\n assert app._kafka_config["bootstrap_servers_id"] == "production"\n assert tester._kafka_config["bootstrap_servers_id"] == "production"\n \n # produce the message to new_employee topic\n await tester.to_new_employee(Employee(name="Mickey", surname="Mouse"))\n # previous line is equal to:\n # await tester.mirrors[app.on_new_employee](Employee(name="Mickey", surname="Mouse"))\n\n # Assert app consumed the message\n await app.awaited_mocks.on_new_employee.assert_called_with(\n Employee(name="Mickey", surname="Mouse"), timeout=5\n )\n\n # If the the previous assert is true (on_new_employee was called),\n # to_employee_email and to_welcome_message were called inside on_new_employee function\n\n # Now we can check if this two messages were consumed\n await tester.awaited_mocks.on_employee_email.assert_called(timeout=5)\n await tester.awaited_mocks.on_welcome_message.assert_called(timeout=5)\n \n assert app._kafka_config["bootstrap_servers_id"] == "localhost"\n print("ok")\n\n\nif __name__ == "__main__":\n loop = asyncio.get_event_loop()\n loop.run_until_complete(async_tests())\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-07-31 10:38:47.045 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-07-31 10:38:47.046 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-07-31 10:38:47.046 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: 
'{'bootstrap_servers': ['production_server_1:9092']}'\n23-07-31 10:38:47.047 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:47.048 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\n23-07-31 10:38:47.048 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:47.067 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'\n23-07-31 10:38:47.067 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-07-31 10:38:47.068 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 10:38:47.070 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\n23-07-31 10:38:47.070 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:47.071 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:47.071 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:47.072 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['new_employee']\n23-07-31 10:38:47.072 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:47.072 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 10:38:47.073 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 
'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\n23-07-31 10:38:47.074 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:47.074 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:47.074 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:47.075 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['employee_email']\n23-07-31 10:38:47.075 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:47.076 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-07-31 10:38:47.076 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}\n23-07-31 10:38:47.076 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-07-31 10:38:47.077 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-07-31 10:38:47.077 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-07-31 10:38:47.078 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['welcome_message']\n23-07-31 10:38:47.078 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-07-31 10:38:51.068 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-07-31 10:38:51.069 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:51.069 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 
10:38:51.070 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-07-31 10:38:51.070 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:51.071 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 10:38:51.071 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:51.072 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-07-31 10:38:51.072 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-07-31 10:38:51.073 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-07-31 10:38:51.073 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:51.074 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-07-31 10:38:51.074 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\nok\n")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/06acf88d.54a41cbf.js b/assets/js/06acf88d.54a41cbf.js new file mode 100644 index 0000000..0a7d4dc --- /dev/null +++ b/assets/js/06acf88d.54a41cbf.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[99],{3905:(e,t,n)=>{n.d(t,{Zo:()=>l,kt:()=>m});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),s=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},l=function(e){var t=s(e.components);return r.createElement(c.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,l=u(e,["components","mdxType","originalType","parentName"]),p=s(n),d=o,m=p["".concat(c,".").concat(d)]||p[d]||f[d]||a;return n?r.createElement(m,i(i({ref:t},l),{},{components:n})):r.createElement(m,i({ref:t},l))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var u={};for(var c in t)hasOwnProperty.call(t,c)&&(u[c]=t[c]);u.originalType=e,u[p]="string"==typeof e?e:o,i[1]=u;for(var s=2;s{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>u,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={},i="Authentication",u={unversionedId:"guides/Guide_03_Authentication",id:"version-0.7.0/guides/Guide_03_Authentication",title:"Authentication",description:"TLS Authentication",source:"@site/versioned_docs/version-0.7.0/guides/Guide_03_Authentication.md",sourceDirName:"guides",slug:"/guides/Guide_03_Authentication",permalink:"/docs/0.7.0/guides/Guide_03_Authentication",draft:!1,tags:[],version:"0.7.0",frontMatter:{}},c={},s=[{value:"TLS Authentication",id:"tls-authentication",level:2}],l={toc:s},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},l,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"authentication"},"Authentication"),(0,o.kt)("h2",{id:"tls-authentication"},"TLS Authentication"),(0,o.kt)("p",null,"sasl_mechanism (str) \u2013 Authentication mechanism when security_protocol\nis configured for 
SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN"),(0,o.kt)("p",null,"sasl_plain_username (str) \u2013 username for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_plain_password (str) \u2013 password for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_oauth_token_provider (AbstractTokenProvider) \u2013 OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). Default: None"))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/09cca5f2.a6e2df67.js b/assets/js/09cca5f2.a6e2df67.js new file mode 100644 index 0000000..14d2b71 --- /dev/null +++ b/assets/js/09cca5f2.a6e2df67.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[29],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),l=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},c=function(e){var t=l(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,c=f(e,["components","mdxType","originalType","parentName"]),p=l(a),u=r,d=p["".concat(s,".").concat(u)]||p[u]||k[u]||o;return 
a?n.createElement(d,i(i({ref:t},c),{},{components:a})):n.createElement(d,i({ref:t},c))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=u;var f={};for(var s in t)hasOwnProperty.call(t,s)&&(f[s]=t[s]);f.originalType=e,f[p]="string"==typeof e?e:r,i[1]=f;for(var l=2;l{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>k,frontMatter:()=>o,metadata:()=>f,toc:()=>l});var n=a(7462),r=(a(7294),a(3905));const o={},i=void 0,f={unversionedId:"api/fastkafka/KafkaEvent",id:"version-0.6.0/api/fastkafka/KafkaEvent",title:"KafkaEvent",description:"fastkafka.KafkaEvent {fastkafka.KafkaEvent}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/KafkaEvent.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/KafkaEvent",permalink:"/docs/0.6.0/api/fastkafka/KafkaEvent",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/0.6.0/api/fastkafka/"},next:{title:"AvroBase",permalink:"/docs/0.6.0/api/fastkafka/encoder/AvroBase"}},s={},l=[{value:"fastkafka.KafkaEvent",id:"fastkafka.KafkaEvent",level:2}],c={toc:l},p="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.KafkaEvent")),(0,r.kt)("p",null,"A generic class for representing Kafka events. 
Based on BaseSubmodel, bound to pydantic.BaseModel"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"message"),": The message contained in the Kafka event, can be of type pydantic.BaseModel."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"key"),": The optional key used to identify the Kafka event.")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0a79db1f.58fed57a.js b/assets/js/0a79db1f.58fed57a.js new file mode 100644 index 0000000..ff82695 --- /dev/null +++ b/assets/js/0a79db1f.58fed57a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2732],{3905:(e,t,r)=>{r.d(t,{Zo:()=>d,kt:()=>k});var a=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,a)}return r}function l(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var i=a.createContext({}),p=function(e){var t=a.useContext(i),r=t;return e&&(r="function"==typeof e?e(t):l(l({},t),e)),r},d=function(e){var t=p(e.components);return a.createElement(i.Provider,{value:t},e.children)},s="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},f=a.forwardRef((function(e,t){var r=e.components,n=e.mdxType,o=e.originalType,i=e.parentName,d=c(e,["components","mdxType","originalType","parentName"]),s=p(r),f=n,k=s["".concat(i,".").concat(f)]||s[f]||u[f]||o;return 
r?a.createElement(k,l(l({ref:t},d),{},{components:r})):a.createElement(k,l({ref:t},d))}));function k(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var o=r.length,l=new Array(o);l[0]=f;var c={};for(var i in t)hasOwnProperty.call(t,i)&&(c[i]=t[i]);c.originalType=e,c[s]="string"==typeof e?e:n,l[1]=c;for(var p=2;p{r.r(t),r.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>c,toc:()=>p});var a=r(7462),n=(r(7294),r(3905));const o={},l=void 0,c={unversionedId:"api/fastkafka/encoder/avro_encoder",id:"api/fastkafka/encoder/avro_encoder",title:"avro_encoder",description:"avroencoder {fastkafka.encoder.avroencoder}",source:"@site/docs/api/fastkafka/encoder/avro_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_encoder",permalink:"/docs/next/api/fastkafka/encoder/avro_encoder",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_decoder",permalink:"/docs/next/api/fastkafka/encoder/avro_decoder"},next:{title:"avsc_to_pydantic",permalink:"/docs/next/api/fastkafka/encoder/avsc_to_pydantic"}},i={},p=[{value:"avro_encoder",id:"fastkafka.encoder.avro_encoder",level:3}],d={toc:p},s="wrapper";function u(e){let{components:t,...r}=e;return(0,n.kt)(s,(0,a.Z)({},d,r,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("h3",{id:"fastkafka.encoder.avro_encoder"},"avro_encoder"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/encoder/avro.py#L239-L259",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"avro_encoder(\n msg\n)\n")),(0,n.kt)("p",null,"Encoder to encode pydantic instances to avro 
message"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Name"),(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"),(0,n.kt)("th",{parentName:"tr",align:null},"Default"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"msg")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"BaseModel")),(0,n.kt)("td",{parentName:"tr",align:null},"An instance of pydantic basemodel"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))))),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"bytes")),(0,n.kt)("td",{parentName:"tr",align:null},"A bytes message which is encoded from pydantic basemodel")))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0d766b78.42ab4cf9.js b/assets/js/0d766b78.42ab4cf9.js new file mode 100644 index 0000000..eacce6e --- /dev/null +++ b/assets/js/0d766b78.42ab4cf9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6492],{3905:(e,a,o)=>{o.d(a,{Zo:()=>c,kt:()=>f});var s=o(7294);function t(e,a,o){return a in e?Object.defineProperty(e,a,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[a]=o,e}function n(e,a){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var 
s=Object.getOwnPropertySymbols(e);a&&(s=s.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),o.push.apply(o,s)}return o}function r(e){for(var a=1;a=0||(t[o]=e[o]);return t}(e,a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);for(s=0;s=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(t[o]=e[o])}return t}var p=s.createContext({}),k=function(e){var a=s.useContext(p),o=a;return e&&(o="function"==typeof e?e(a):r(r({},a),e)),o},c=function(e){var a=k(e.components);return s.createElement(p.Provider,{value:a},e.children)},_="mdxType",l={inlineCode:"code",wrapper:function(e){var a=e.children;return s.createElement(s.Fragment,{},a)}},m=s.forwardRef((function(e,a){var o=e.components,t=e.mdxType,n=e.originalType,p=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),_=k(o),m=t,f=_["".concat(p,".").concat(m)]||_[m]||l[m]||n;return o?s.createElement(f,r(r({ref:a},c),{},{components:o})):s.createElement(f,r({ref:a},c))}));function f(e,a){var o=arguments,t=a&&a.mdxType;if("string"==typeof e||t){var n=o.length,r=new Array(n);r[0]=m;var i={};for(var p in a)hasOwnProperty.call(a,p)&&(i[p]=a[p]);i.originalType=e,i[_]="string"==typeof e?e:t,r[1]=i;for(var k=2;k{o.r(a),o.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>l,frontMatter:()=>n,metadata:()=>i,toc:()=>k});var s=o(7462),t=(o(7294),o(3905));const n={},r="Using multiple Kafka clusters",i={unversionedId:"guides/Guide_24_Using_Multiple_Kafka_Clusters",id:"version-0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters",title:"Using multiple Kafka clusters",description:"Ready to take your FastKafka app to the next level? 
This guide shows you",source:"@site/versioned_docs/version-0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",sourceDirName:"guides",slug:"/guides/Guide_24_Using_Multiple_Kafka_Clusters",permalink:"/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},next:{title:"Using Redpanda to test FastKafka",permalink:"/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka"}},p={},k=[{value:"Test message",id:"test-message",level:3},{value:"Defining multiple broker configurations",id:"defining-multiple-broker-configurations",level:2},{value:"How it works",id:"how-it-works",level:4},{value:"Testing the application",id:"testing-the-application",level:2},{value:"Running the application",id:"running-the-application",level:2},{value:"Application documentation",id:"application-documentation",level:2},{value:"Examples on how to use multiple broker configurations",id:"examples-on-how-to-use-multiple-broker-configurations",level:2},{value:"Example #1",id:"example-1",level:3},{value:"Testing",id:"testing",level:4},{value:"Example #2",id:"example-2",level:3},{value:"Testing",id:"testing-1",level:4},{value:"Example #3",id:"example-3",level:3},{value:"Testing",id:"testing-2",level:4}],c={toc:k},_="wrapper";function l(e){let{components:a,...o}=e;return(0,t.kt)(_,(0,s.Z)({},c,o,{components:a,mdxType:"MDXLayout"}),(0,t.kt)("h1",{id:"using-multiple-kafka-clusters"},"Using multiple Kafka clusters"),(0,t.kt)("p",null,"Ready to take your FastKafka app to the next level? This guide shows you\nhow to connect to multiple Kafka clusters effortlessly. Consolidate\ntopics and produce messages across clusters like a pro. Unleash the full\npotential of your Kafka-powered app with FastKafka. 
Let\u2019s dive in and\nelevate your application\u2019s capabilities!"),(0,t.kt)("h3",{id:"test-message"},"Test message"),(0,t.kt)("p",null,"To showcase the functionalities of FastKafka and illustrate the concepts\ndiscussed, we can use a simple test message called ",(0,t.kt)("inlineCode",{parentName:"p"},"TestMsg"),". Here\u2019s the\ndefinition of the ",(0,t.kt)("inlineCode",{parentName:"p"},"TestMsg")," class:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},"class TestMsg(BaseModel):\n msg: str = Field(...)\n")),(0,t.kt)("h2",{id:"defining-multiple-broker-configurations"},"Defining multiple broker configurations"),(0,t.kt)("p",null,"When building a FastKafka application, you may need to consume messages\nfrom multiple Kafka clusters, each with its own set of broker\nconfigurations. FastKafka provides the flexibility to define different\nbroker clusters using the brokers argument in the consumes decorator.\nLet\u2019s explore an example code snippet"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(\n development=dict(url="dev.server_1", port=9092),\n production=dict(url="prod.server_1", port=9092),\n)\nkafka_brokers_2 = dict(\n development=dict(url="dev.server_2", port=9092),\n production=dict(url="prod.server_1", port=9092),\n)\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n print(f"Received on s1: {msg=}")\n await to_predictions_1(msg)\n\n\n@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n print(f"Received on s2: {msg=}")\n await to_predictions_2(msg)\n \n@app.produces(topic="predictions")\nasync def to_predictions_1(msg: TestMsg) -> TestMsg:\n return msg\n 
\n@app.produces(topic="predictions", brokers=kafka_brokers_2)\nasync def to_predictions_2(msg: TestMsg) -> TestMsg:\n return msg\n')),(0,t.kt)("p",null,"In this example, the application has two consumes endpoints, both of\nwhich will consume events from ",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals")," topic.\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_1")," will consume events from ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"\nconfiguration and ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2")," will consume events from\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," configuration. When producing, ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1")," will\nproduce to ",(0,t.kt)("inlineCode",{parentName:"p"},"predictions")," topic on ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," cluster and\n",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2")," will produce to ",(0,t.kt)("inlineCode",{parentName:"p"},"predictions")," topic on\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," cluster."),(0,t.kt)("h4",{id:"how-it-works"},"How it works"),(0,t.kt)("p",null,"The ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," configuration represents the primary cluster,\nwhile ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," serves as an alternative cluster specified in\nthe decorator."),(0,t.kt)("p",null,"Using the FastKafka class, the app object is initialized with the\nprimary broker configuration (",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"). 
By default, the\n",(0,t.kt)("inlineCode",{parentName:"p"},"@app.consumes")," decorator without the brokers argument consumes messages\nfrom the ",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals")," topic on ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"."),(0,t.kt)("p",null,"To consume messages from a different cluster, the ",(0,t.kt)("inlineCode",{parentName:"p"},"@app.consumes"),"\ndecorator includes the ",(0,t.kt)("inlineCode",{parentName:"p"},"brokers")," argument. This allows explicit\nspecification of the broker cluster in the ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2"),"\nfunction, enabling consumption from the same topic but using the\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," configuration."),(0,t.kt)("p",null,"The brokers argument can also be used in the @app.produces decorator to\ndefine multiple broker clusters for message production."),(0,t.kt)("p",null,"It\u2019s important to ensure that all broker configurations have the same\nrequired settings as the primary cluster to ensure consistent behavior."),(0,t.kt)("h2",{id:"testing-the-application"},"Testing the application"),(0,t.kt)("p",null,"To test our FastKafka \u2018mirroring\u2019 application, we can use our testing\nframework. 
Lets take a look how it\u2019s done:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n # Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from\n await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal_s1"))\n # Assert on_preprocessed_signals_1 consumed sent message\n await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(\n TestMsg(msg="signal_s1"), timeout=5\n )\n # Assert app has produced a prediction\n await tester.mirrors[app.to_predictions_1].assert_called_with(\n TestMsg(msg="signal_s1"), timeout=5\n )\n\n # Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from\n await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal_s2"))\n # Assert on_preprocessed_signals_2 consumed sent message\n await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(\n TestMsg(msg="signal_s2"), timeout=5\n )\n # Assert app has produced a prediction\n await tester.mirrors[app.to_predictions_2].assert_called_with(\n TestMsg(msg="signal_s2"), timeout=5\n )\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-05-30 10:33:08.721 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-05-30 10:33:08.721 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n23-05-30 10:33:08.722 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:08.722 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 
'dev.server_2:9092'}'\n23-05-30 10:33:08.723 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:08.741 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n23-05-30 10:33:08.741 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:08.742 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n23-05-30 10:33:08.743 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:08.744 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:08.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n23-05-30 10:33:08.746 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:08.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:08.749 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 
'dev.server_2:9092'}\n23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:08.755 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:08.756 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:08.756 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n23-05-30 10:33:08.758 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:08.758 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:33:08.759 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:08.760 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:08.761 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\n23-05-30 10:33:08.762 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:08.762 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:08.763 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:33:08.763 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nReceived on s1: msg=TestMsg(msg='signal_s1')\nReceived on s2: msg=TestMsg(msg='signal_s2')\n23-05-30 10:33:13.745 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:13.747 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:13.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:13.748 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:13.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:13.751 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 
10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n23-05-30 10:33:13.754 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"The usage of the ",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors")," dictionary allows specifying the\ndesired topic/broker combination for sending the test messages,\nespecially when working with multiple Kafka clusters. This ensures that\nthe data is sent to the appropriate topic/broker based on the consuming\nfunction, and consumed from appropriate topic/broker based on the\nproducing function."),(0,t.kt)("h2",{id:"running-the-application"},"Running the application"),(0,t.kt)("p",null,"You can run your application using ",(0,t.kt)("inlineCode",{parentName:"p"},"fastkafka run")," CLI command in the\nsame way that you would run a single cluster app."),(0,t.kt)("p",null,"To start your app, copy the code above in multi_cluster_example.py and\nrun it by running:"),(0,t.kt)("p",null,"Now we can run the app. 
Copy the code above in multi_cluster_example.py,\nadjust your server configurations, and run it by running"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app\n")),(0,t.kt)("p",null,"In your app logs, you should see your app starting up and your two\nconsumer functions connecting to different kafka clusters."),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"[90735]: 23-05-30 10:33:29.699 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:57647'}\n[90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n[90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n[90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics 
to: frozenset({'preprocessed_signals'})\n[90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n[90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[90735]: 23-05-30 10:33:29.722 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n[90735]: 23-05-30 10:33:29.723 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \nStarting process cleanup, this may take a few seconds...\n23-05-30 10:33:33.548 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 90735...\n[90735]: 23-05-30 10:33:34.666 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:34.777 [INFO] fastkafka._server: terminate_asyncio_process(): Process 90735 terminated.\n")),(0,t.kt)("h2",{id:"application-documentation"},"Application documentation"),(0,t.kt)("p",null,"At the moment the documentation for multicluster app is not yet\nimplemented, but it is under development and you can expecti it soon!"),(0,t.kt)("h2",{id:"examples-on-how-to-use-multiple-broker-configurations"},"Examples on how to use multiple broker configurations"),(0,t.kt)("h3",{id:"example-1"},"Example ","#","1"),(0,t.kt)("p",null,"In this section, we\u2019ll explore how you can effectively forward topics\nbetween different Kafka clusters, enabling seamless data synchronization\nfor your applications."),(0,t.kt)("p",null,"Imagine 
having two Kafka clusters, namely ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2"),", each hosting its own set of topics and messages. Now,\nif you want to forward a specific topic (in this case:\n",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals"),") from kafka_brokers_1 to kafka_brokers_2,\nFastKafka provides an elegant solution."),(0,t.kt)("p",null,"Let\u2019s examine the code snippet that configures our application for topic\nforwarding:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_original(msg: TestMsg):\n await to_preprocessed_signals_forward(msg)\n\n\n@app.produces(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:\n return data\n')),(0,t.kt)("p",null,"Here\u2019s how it works: our FastKafka application is configured to consume\nmessages from ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," and process them in the\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_original")," function. We want to forward these\nmessages to ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2"),". 
To achieve this, we define the\n",(0,t.kt)("inlineCode",{parentName:"p"},"to_preprocessed_signals_forward")," function as a producer, seamlessly\nproducing the processed messages to the preprocessed_signals topic\nwithin the ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," cluster."),(0,t.kt)("h4",{id:"testing"},"Testing"),(0,t.kt)("p",null,"To test our FastKafka forwarding application, we can use our testing\nframework. Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg="signal"))\n await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-05-30 10:33:40.969 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n23-05-30 10:33:40.970 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-05-30 10:33:40.971 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-05-30 10:33:40.972 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-05-30 10:33:40.972 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:40.982 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:33:40.982 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:40.983 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:40.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following 
parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:33:40.984 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:40.985 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:40.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:40.986 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:40.987 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-05-30 10:33:40.989 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:40.989 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:40.991 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:44.983 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() 
finished.\n23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:44.986 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:44.987 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n23-05-30 10:33:44.988 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"With the help of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," object, we can simulate and verify the\nbehavior of our FastKafka application. Here\u2019s how it works:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We create an instance of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," by passing in our ",(0,t.kt)("em",{parentName:"p"},"app"),"\nobject, which represents our FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the ",(0,t.kt)("strong",{parentName:"p"},"tester.mirrors")," dictionary, we can send a message to a\nspecific Kafka broker and topic combination. 
In this case, we use\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_original]"),' to send a\nTestMsg message with the content \u201csignal" to the appropriate Kafka\nbroker and topic.')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the message, we can perform assertions on the mirrored\nfunction using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)"),".\nThis assertion ensures that the mirrored function has been called\nwithin a specified timeout period (in this case, 5 seconds)."))),(0,t.kt)("h3",{id:"example-2"},"Example ","#","2"),(0,t.kt)("p",null,"In this section, we\u2019ll explore how you can effortlessly consume data\nfrom multiple sources, process it, and aggregate the results into a\nsingle topic on a specific cluster."),(0,t.kt)("p",null,"Imagine you have two Kafka clusters: ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2"),", each hosting its own set of topics and messages.\nNow, what if you want to consume data from both clusters, perform some\nprocessing, and produce the results to a single topic on\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1"),"? 
FastKafka has got you covered!"),(0,t.kt)("p",null,"Let\u2019s take a look at the code snippet that configures our application\nfor aggregating multiple clusters:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n print(f"Default: {msg=}")\n await to_predictions(msg)\n\n\n@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n print(f"Specified: {msg=}")\n await to_predictions(msg)\n\n\n@app.produces(topic="predictions")\nasync def to_predictions(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction: {prediction}")\n return [prediction]\n')),(0,t.kt)("p",null,'Here\u2019s the idea: our FastKafka application is set to consume messages\nfrom the topic \u201cpreprocessed_signals" on ',(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," cluster, as\nwell as from the same topic on ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2")," cluster. We have two\nconsuming functions, ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_1")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2"),", that handle the messages from their\nrespective clusters. These functions perform any required processing, in\nthis case, just calling the to_predictions function."),(0,t.kt)("p",null,'The exciting part is that the to_predictions function acts as a\nproducer, sending the processed results to the \u201cpredictions" topic on\n',(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1 cluster"),". 
By doing so, we effectively aggregate the\ndata from multiple sources into a single topic on a specific cluster."),(0,t.kt)("p",null,"This approach enables you to consume data from multiple Kafka clusters,\nprocess it, and produce the aggregated results to a designated topic.\nWhether you\u2019re generating predictions, performing aggregations, or any\nother form of data processing, FastKafka empowers you to harness the\nfull potential of multiple clusters."),(0,t.kt)("h4",{id:"testing-1"},"Testing"),(0,t.kt)("p",null,"Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal"))\n await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal"))\n await tester.on_predictions.assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-05-30 10:33:50.828 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-05-30 10:33:50.829 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:33:50.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:50.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:33:50.875 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:50.876 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: 
'{'bootstrap_servers': 'server_2:9092'}'\n23-05-30 10:33:50.876 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:50.877 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:50.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:50.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:50.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:50.880 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-05-30 10:33:50.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:50.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:50.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:50.883 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:50.883 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:50.884 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:33:50.885 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:50.885 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:33:50.886 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nDefault: msg=TestMsg(msg='signal')\nSending prediction: msg='signal'\nSpecified: msg=TestMsg(msg='signal')\nSending prediction: msg='signal'\n23-05-30 10:33:54.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:54.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:54.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:54.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:54.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:54.881 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:54.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:54.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:54.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"Here\u2019s how the code above works:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Within an ",(0,t.kt)("inlineCode",{parentName:"p"},"async with")," block, create an instance of the Tester by\npassing in your app object, representing your FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the tester.mirrors dictionary, you can send messages to\nspecific Kafka broker and topic combinations. In this case, we use\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_1]")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_2]"),' to send TestMsg\nmessages with the content \u201csignal" to the corresponding Kafka broker\nand topic combinations.')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the messages, you can perform assertions on the\n",(0,t.kt)("strong",{parentName:"p"},"on_predictions")," function using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.on_predictions.assert_called(timeout=5)"),". 
This assertion\nensures that the on_predictions function has been called within a\nspecified timeout period (in this case, 5 seconds)."))),(0,t.kt)("h3",{id:"example-3"},"Example ","#","3"),(0,t.kt)("p",null,"In some scenarios, you may need to produce messages to multiple Kafka\nclusters simultaneously. FastKafka simplifies this process by allowing\nyou to configure your application to produce messages to multiple\nclusters effortlessly. Let\u2019s explore how you can achieve this:"),(0,t.kt)("p",null,"Consider the following code snippet that demonstrates producing messages\nto multiple clusters:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals(msg: TestMsg):\n print(f"{msg=}")\n await to_predictions_1(TestMsg(msg="prediction"))\n await to_predictions_2(TestMsg(msg="prediction"))\n\n\n@app.produces(topic="predictions")\nasync def to_predictions_1(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction to s1: {prediction}")\n return [prediction]\n\n\n@app.produces(topic="predictions", brokers=kafka_brokers_2)\nasync def to_predictions_2(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction to s2: {prediction}")\n return [prediction]\n')),(0,t.kt)("p",null,"Here\u2019s what you need to know about producing to multiple clusters:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We define two Kafka broker configurations: ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2"),", representing different clusters with their\nrespective 
connection details.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We create an instance of the FastKafka application, specifying\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," as the primary cluster for producing messages.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"The ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals"),' function serves as a consumer,\nhandling incoming messages from the \u201cpreprocessed_signals" topic.\nWithin this function, we invoke two producer functions:\n',(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1")," and ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2"),".")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"The ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1"),' function sends predictions to the\n\u201cpredictions" topic on ',(0,t.kt)("em",{parentName:"p"},"kafka_brokers_1")," cluster.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Additionally, the ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2"),' function sends the same\npredictions to the \u201cpredictions" topic on ',(0,t.kt)("em",{parentName:"p"},"kafka_brokers_2")," cluster.\nThis allows for producing the same data to multiple clusters\nsimultaneously."))),(0,t.kt)("p",null,"By utilizing this approach, you can seamlessly produce messages to\nmultiple Kafka clusters, enabling you to distribute data across\ndifferent environments or leverage the strengths of various clusters."),(0,t.kt)("p",null,"Feel free to customize the producer functions as per your requirements,\nperforming any necessary data transformations or enrichment before\nsending the predictions."),(0,t.kt)("p",null,"With FastKafka, producing to multiple clusters becomes a breeze,\nempowering you to harness the capabilities of multiple environments\neffortlessly."),(0,t.kt)("h4",{id:"testing-2"},"Testing"),(0,t.kt)("p",null,"Let\u2019s take a look at the testing code 
snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.to_preprocessed_signals(TestMsg(msg="signal"))\n await tester.mirrors[to_predictions_1].assert_called(timeout=5)\n await tester.mirrors[to_predictions_2].assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-05-30 10:34:00.033 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n23-05-30 10:34:00.034 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-05-30 10:34:00.035 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-05-30 10:34:00.036 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:34:00.037 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:34:00.038 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-05-30 10:34:00.038 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:34:00.052 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:34:00.053 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:34:00.054 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:34:00.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:34:00.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer 
patched start() called()\n23-05-30 10:34:00.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:34:00.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:34:00.057 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:34:00.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:34:00.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:34:00.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:34:00.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:34:00.062 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:34:00.062 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-05-30 10:34:00.064 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:34:00.064 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:34:00.065 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nmsg=TestMsg(msg='signal')\nSending prediction to s1: msg='prediction'\nSending prediction to s2: msg='prediction'\n23-05-30 10:34:04.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:34:04.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:34:04.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:34:04.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:34:04.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:34:04.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:34:04.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() 
called\n23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"Here\u2019s how you can perform the necessary tests:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Within an async with block, create an instance of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," by\npassing in your app object, representing your FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the ",(0,t.kt)("inlineCode",{parentName:"p"},"tester.to_preprocessed_signals"),' method, you can send a\nTestMsg message with the content \u201csignal".')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the message, you can perform assertions on the\nto_predictions_1 and to_predictions_2 functions using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[to_predictions_1].assert_called(timeout=5)")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[to_predictions_2].assert_called(timeout=5)"),". These\nassertions ensure that the respective producer functions have\nproduced data to their respective topic/broker combinations."))),(0,t.kt)("p",null,"By employing this testing approach, you can verify that the producing\nfunctions correctly send messages to their respective clusters. 
The\ntesting framework provided by FastKafka enables you to ensure the\naccuracy and reliability of your application\u2019s producing logic."))}l.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0d927e9a.ee128e68.js b/assets/js/0d927e9a.ee128e68.js new file mode 100644 index 0000000..56a19ff --- /dev/null +++ b/assets/js/0d927e9a.ee128e68.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9652],{3905:(e,t,n)=>{n.d(t,{Zo:()=>h,kt:()=>m});var i=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=i.createContext({}),l=function(e){var t=i.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},h=function(e){var t=l(e.components);return i.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},p=i.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,c=e.parentName,h=s(e,["components","mdxType","originalType","parentName"]),d=l(n),p=o,m=d["".concat(c,".").concat(p)]||d[p]||u[p]||r;return n?i.createElement(m,a(a({ref:t},h),{},{components:n})):i.createElement(m,a({ref:t},h))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,a=new Array(r);a[0]=p;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[d]="string"==typeof e?e:o,a[1]=s;for(var 
l=2;l{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>a,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var i=n(7462),o=(n(7294),n(3905));const r={},a=void 0,s={unversionedId:"LICENSE",id:"version-0.8.0/LICENSE",title:"LICENSE",description:"Apache License",source:"@site/versioned_docs/version-0.8.0/LICENSE.md",sourceDirName:".",slug:"/LICENSE",permalink:"/docs/LICENSE",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"run_fastkafka_server_process",permalink:"/docs/cli/run_fastkafka_server_process"},next:{title:"Contributing to FastKafka",permalink:"/docs/CONTRIBUTING"}},c={},l=[],h={toc:l},d="wrapper";function u(e){let{components:t,...n}=e;return(0,o.kt)(d,(0,i.Z)({},h,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Apache License\nVersion 2.0, January 2004\n",(0,o.kt)("a",{parentName:"p",href:"http://www.apache.org/licenses/"},"http://www.apache.org/licenses/")),(0,o.kt)("p",null," TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Definitions."),(0,o.kt)("p",{parentName:"li"},'"License" shall mean the terms and conditions for use, reproduction,\nand distribution as defined by Sections 1 through 9 of this document.'),(0,o.kt)("p",{parentName:"li"},'"Licensor" shall mean the copyright owner or entity authorized by\nthe copyright owner that is granting the License.'),(0,o.kt)("p",{parentName:"li"},'"Legal Entity" shall mean the union of the acting entity and all\nother entities that control, are controlled by, or are under common\ncontrol with that entity. 
For the purposes of this definition,\n"control" means (i) the power, direct or indirect, to cause the\ndirection or management of such entity, whether by contract or\notherwise, or (ii) ownership of fifty percent (50%) or more of the\noutstanding shares, or (iii) beneficial ownership of such entity.'),(0,o.kt)("p",{parentName:"li"},'"You" (or "Your") shall mean an individual or Legal Entity\nexercising permissions granted by this License.'),(0,o.kt)("p",{parentName:"li"},'"Source" form shall mean the preferred form for making modifications,\nincluding but not limited to software source code, documentation\nsource, and configuration files.'),(0,o.kt)("p",{parentName:"li"},'"Object" form shall mean any form resulting from mechanical\ntransformation or translation of a Source form, including but\nnot limited to compiled object code, generated documentation,\nand conversions to other media types.'),(0,o.kt)("p",{parentName:"li"},'"Work" shall mean the work of authorship, whether in Source or\nObject form, made available under the License, as indicated by a\ncopyright notice that is included in or attached to the work\n(an example is provided in the Appendix below).'),(0,o.kt)("p",{parentName:"li"},'"Derivative Works" shall mean any work, whether in Source or Object\nform, that is based on (or derived from) the Work and for which the\neditorial revisions, annotations, elaborations, or other modifications\nrepresent, as a whole, an original work of authorship. 
For the purposes\nof this License, Derivative Works shall not include works that remain\nseparable from, or merely link (or bind by name) to the interfaces of,\nthe Work and Derivative Works thereof.'),(0,o.kt)("p",{parentName:"li"},'"Contribution" shall mean any work of authorship, including\nthe original version of the Work and any modifications or additions\nto that Work or Derivative Works thereof, that is intentionally\nsubmitted to Licensor for inclusion in the Work by the copyright owner\nor by an individual or Legal Entity authorized to submit on behalf of\nthe copyright owner. For the purposes of this definition, "submitted"\nmeans any form of electronic, verbal, or written communication sent\nto the Licensor or its representatives, including but not limited to\ncommunication on electronic mailing lists, source code control systems,\nand issue tracking systems that are managed by, or on behalf of, the\nLicensor for the purpose of discussing and improving the Work, but\nexcluding communication that is conspicuously marked or otherwise\ndesignated in writing by the copyright owner as "Not a Contribution."'),(0,o.kt)("p",{parentName:"li"},'"Contributor" shall mean Licensor and any individual or Legal Entity\non behalf of whom a Contribution has been received by Licensor and\nsubsequently incorporated within the Work.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Copyright License. Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\ncopyright license to reproduce, prepare Derivative Works of,\npublicly display, publicly perform, sublicense, and distribute the\nWork and such Derivative Works in Source or Object form.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Patent License. 
Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\n(except as stated in this section) patent license to make, have made,\nuse, offer to sell, sell, import, and otherwise transfer the Work,\nwhere such license applies only to those patent claims licensable\nby such Contributor that are necessarily infringed by their\nContribution(s) alone or by combination of their Contribution(s)\nwith the Work to which such Contribution(s) was submitted. If You\ninstitute patent litigation against any entity (including a\ncross-claim or counterclaim in a lawsuit) alleging that the Work\nor a Contribution incorporated within the Work constitutes direct\nor contributory patent infringement, then any patent licenses\ngranted to You under this License for that Work shall terminate\nas of the date such litigation is filed.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Redistribution. 
You may reproduce and distribute copies of the\nWork or Derivative Works thereof in any medium, with or without\nmodifications, and in Source or Object form, provided that You\nmeet the following conditions:"),(0,o.kt)("p",{parentName:"li"},"(a) You must give any other recipients of the Work or\nDerivative Works a copy of this License; and"),(0,o.kt)("p",{parentName:"li"},"(b) You must cause any modified files to carry prominent notices\nstating that You changed the files; and"),(0,o.kt)("p",{parentName:"li"},"(c) You must retain, in the Source form of any Derivative Works\nthat You distribute, all copyright, patent, trademark, and\nattribution notices from the Source form of the Work,\nexcluding those notices that do not pertain to any part of\nthe Derivative Works; and"),(0,o.kt)("p",{parentName:"li"},'(d) If the Work includes a "NOTICE" text file as part of its\ndistribution, then any Derivative Works that You distribute must\ninclude a readable copy of the attribution notices contained\nwithin such NOTICE file, excluding those notices that do not\npertain to any part of the Derivative Works, in at least one\nof the following places: within a NOTICE text file distributed\nas part of the Derivative Works; within the Source form or\ndocumentation, if provided along with the Derivative Works; or,\nwithin a display generated by the Derivative Works, if and\nwherever such third-party notices normally appear. The contents\nof the NOTICE file are for informational purposes only and\ndo not modify the License. 
You may add Your own attribution\nnotices within Derivative Works that You distribute, alongside\nor as an addendum to the NOTICE text from the Work, provided\nthat such additional attribution notices cannot be construed\nas modifying the License.'),(0,o.kt)("p",{parentName:"li"},"You may add Your own copyright statement to Your modifications and\nmay provide additional or different license terms and conditions\nfor use, reproduction, or distribution of Your modifications, or\nfor any such Derivative Works as a whole, provided Your use,\nreproduction, and distribution of the Work otherwise complies with\nthe conditions stated in this License.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Submission of Contributions. Unless You explicitly state otherwise,\nany Contribution intentionally submitted for inclusion in the Work\nby You to the Licensor shall be under the terms and conditions of\nthis License, without any additional terms or conditions.\nNotwithstanding the above, nothing herein shall supersede or modify\nthe terms of any separate license agreement you may have executed\nwith Licensor regarding such Contributions.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Trademarks. This License does not grant permission to use the trade\nnames, trademarks, service marks, or product names of the Licensor,\nexcept as required for reasonable and customary use in describing the\norigin of the Work and reproducing the content of the NOTICE file.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},'Disclaimer of Warranty. Unless required by applicable law or\nagreed to in writing, Licensor provides the Work (and each\nContributor provides its Contributions) on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\nimplied, including, without limitation, any warranties or conditions\nof TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\nPARTICULAR PURPOSE. 
You are solely responsible for determining the\nappropriateness of using or redistributing the Work and assume any\nrisks associated with Your exercise of permissions under this License.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Limitation of Liability. In no event and under no legal theory,\nwhether in tort (including negligence), contract, or otherwise,\nunless required by applicable law (such as deliberate and grossly\nnegligent acts) or agreed to in writing, shall any Contributor be\nliable to You for damages, including any direct, indirect, special,\nincidental, or consequential damages of any character arising as a\nresult of this License or out of the use or inability to use the\nWork (including but not limited to damages for loss of goodwill,\nwork stoppage, computer failure or malfunction, or any and all\nother commercial damages or losses), even if such Contributor\nhas been advised of the possibility of such damages.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Accepting Warranty or Additional Liability. While redistributing\nthe Work or Derivative Works thereof, You may choose to offer,\nand charge a fee for, acceptance of support, warranty, indemnity,\nor other liability obligations and/or rights consistent with this\nLicense. 
However, in accepting such obligations, You may act only\non Your own behalf and on Your sole responsibility, not on behalf\nof any other Contributor, and only if You agree to indemnify,\ndefend, and hold each Contributor harmless for any liability\nincurred by, or claims asserted against, such Contributor by reason\nof your accepting any such warranty or additional liability."),(0,o.kt)("p",{parentName:"li"},"END OF TERMS AND CONDITIONS"),(0,o.kt)("p",{parentName:"li"},"APPENDIX: How to apply the Apache License to your work."),(0,o.kt)("p",{parentName:"li"},' To apply the Apache License to your work, attach the following\nboilerplate notice, with the fields enclosed by brackets "[]"\nreplaced with your own identifying information. (Don\'t include\nthe brackets!) The text should be enclosed in the appropriate\ncomment syntax for the file format. We also recommend that a\nfile or class name and description of purpose be included on the\nsame "printed page" as the copyright notice for easier\nidentification within third-party archives.'),(0,o.kt)("p",{parentName:"li"},"Copyright ","[yyyy][name of copyright owner]"),(0,o.kt)("p",{parentName:"li"},'Licensed under the Apache License, Version 2.0 (the "License");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at'),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre"},"http://www.apache.org/licenses/LICENSE-2.0\n")),(0,o.kt)("p",{parentName:"li"},'Unless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.'))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0fb5d45b.a1df13f6.js b/assets/js/0fb5d45b.a1df13f6.js new file mode 100644 index 0000000..e944bfd --- /dev/null +++ 
b/assets/js/0fb5d45b.a1df13f6.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3671],{3905:(e,n,t)=>{t.d(n,{Zo:()=>c,kt:()=>f});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var l=a.createContext({}),p=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},c=function(e){var n=p(e.components);return a.createElement(l.Provider,{value:n},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,r=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=p(t),m=i,f=d["".concat(l,".").concat(m)]||d[m]||u[m]||r;return t?a.createElement(f,o(o({ref:n},c),{},{components:t})):a.createElement(f,o({ref:n},c))}));function f(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var r=t.length,o=new Array(r);o[0]=m;var s={};for(var l in n)hasOwnProperty.call(n,l)&&(s[l]=n[l]);s.originalType=e,s[d]="string"==typeof e?e:i,o[1]=s;for(var p=2;p{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>o,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var a=t(7462),i=(t(7294),t(3905));const r={},o="Lifespan Events",s={unversionedId:"guides/Guide_05_Lifespan_Handler",id:"guides/Guide_05_Lifespan_Handler",title:"Lifespan Events",description:"Did you know that you can define some special code that runs before 
and",source:"@site/docs/guides/Guide_05_Lifespan_Handler.md",sourceDirName:"guides",slug:"/guides/Guide_05_Lifespan_Handler",permalink:"/docs/next/guides/Guide_05_Lifespan_Handler",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Batch producing",permalink:"/docs/next/guides/Guide_23_Batch_Producing"},next:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"}},l={},p=[{value:"Lifespan example - Iris prediction model",id:"lifespan-example---iris-prediction-model",level:2},{value:"Lifespan",id:"lifespan",level:3},{value:"Async context manager",id:"async-context-manager",level:3},{value:"App demo",id:"app-demo",level:2},{value:"FastKafka app",id:"fastkafka-app",level:3},{value:"Data modeling",id:"data-modeling",level:3},{value:"Consumers and producers",id:"consumers-and-producers",level:3},{value:"Final app",id:"final-app",level:3},{value:"Running the app",id:"running-the-app",level:3},{value:"Recap",id:"recap",level:2}],c={toc:p},d="wrapper";function u(e){let{components:n,...t}=e;return(0,i.kt)(d,(0,a.Z)({},c,t,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"lifespan-events"},"Lifespan Events"),(0,i.kt)("p",null,"Did you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! \ud83d\ude80"),(0,i.kt)("p",null,"Lets break it down:"),(0,i.kt)("p",null,"You can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages."),(0,i.kt)("p",null,"Similarly, you can define logic (code) that should be executed when the\napplication is shutting down. 
This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up."),(0,i.kt)("p",null,"By executing code before consuming and after producing, you cover the\nentire lifecycle of your application \ud83c\udf89"),(0,i.kt)("p",null,"This is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!"),(0,i.kt)("p",null,"So lets give it a try and see how it can make your Kafka app even more\nawesome! \ud83d\udcaa"),(0,i.kt)("h2",{id:"lifespan-example---iris-prediction-model"},"Lifespan example - Iris prediction model"),(0,i.kt)("p",null,"Let\u2019s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don\u2019t want to load them for every\nmessage."),(0,i.kt)("p",null,"Here\u2019s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. 
In the upcoming sections, we\u2019ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication."),(0,i.kt)("h3",{id:"lifespan"},"Lifespan"),(0,i.kt)("p",null,"You can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager."),(0,i.kt)("p",null,"Let\u2019s start with an example and then see it in detail."),(0,i.kt)("p",null,"We create an async function lifespan() with yield like this:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \n')),(0,i.kt)("p",null,"The first thing to notice, is that we are defining an async function\nwith ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),". This is very similar to Dependencies with ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,i.kt)("p",null,"The first part of the function, before the ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),", will be executed\n",(0,i.kt)("strong",{parentName:"p"},"before")," the application starts. 
And the part after the ",(0,i.kt)("inlineCode",{parentName:"p"},"yield")," will\nbe executed ",(0,i.kt)("strong",{parentName:"p"},"after")," the application has finished."),(0,i.kt)("p",null,"This lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown."),(0,i.kt)("p",null,"The lifespan will be passed an KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup."),(0,i.kt)("p",null,"For demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called."),(0,i.kt)("h3",{id:"async-context-manager"},"Async context manager"),(0,i.kt)("p",null,"Context managers can be used in ",(0,i.kt)("inlineCode",{parentName:"p"},"with")," blocks, our lifespan, for example\ncould be used like this:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"ml_models = {}\nasync with lifespan(None):\n print(ml_models)\n")),(0,i.kt)("p",null,"When you create a context manager or an async context manager, what it\ndoes is that, before entering the ",(0,i.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute the code\nbefore the ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),", and after exiting the ",(0,i.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute\nthe code after the ",(0,i.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,i.kt)("p",null,"If you want to learn more about context managers and contextlib\ndecorators, please visit ",(0,i.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/contextlib.html"},"Python official\ndocs")),(0,i.kt)("h2",{id:"app-demo"},"App demo"),(0,i.kt)("h3",{id:"fastkafka-app"},"FastKafka app"),(0,i.kt)("p",null,"Lets now create our application using the created lifespan handler."),(0,i.kt)("p",null,"Notice how we passed our lifespan handler to the app when constructing\nit trough the 
",(0,i.kt)("inlineCode",{parentName:"p"},"lifespan")," argument."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "",\n "description": "local development kafka broker",\n "port": "",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n')),(0,i.kt)("h3",{id:"data-modeling"},"Data modeling"),(0,i.kt)("p",null,"Lets model the Iris data for our app:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("h3",{id:"consumers-and-producers"},"Consumers and producers"),(0,i.kt)("p",null,"Lets create a consumer and producer for our app that will generate\npredictions from input iris data."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = 
IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"final-app"},"Final app"),(0,i.kt)("p",null,"The final app looks like this:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \nkafka_brokers = {\n "localhost": {\n "url": "",\n "description": "local development kafka broker",\n "port": "",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def 
to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"running-the-app"},"Running the app"),(0,i.kt)("p",null,"Now we can run the app with your custom lifespan handler. Copy the code\nabove in lifespan_example.py and run it by running"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n")),(0,i.kt)("p",null,"When you run the app, you should see a simmilar output to the one below:"),(0,i.kt)("h2",{id:"recap"},"Recap"),(0,i.kt)("p",null,"In this guide we have defined a lifespan handler and passed to our\nFastKafka app."),(0,i.kt)("p",null,"Some important points are:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Lifespan handler is implemented as\n",(0,i.kt)("a",{parentName:"li",href:"https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager"},"AsyncContextManager")),(0,i.kt)("li",{parentName:"ol"},"Code ",(0,i.kt)("strong",{parentName:"li"},"before")," yield in lifespan will be executed ",(0,i.kt)("strong",{parentName:"li"},"before"),"\napplication ",(0,i.kt)("strong",{parentName:"li"},"startup")),(0,i.kt)("li",{parentName:"ol"},"Code ",(0,i.kt)("strong",{parentName:"li"},"after")," yield in lifespan will be executed ",(0,i.kt)("strong",{parentName:"li"},"after"),"\napplication ",(0,i.kt)("strong",{parentName:"li"},"shutdown")),(0,i.kt)("li",{parentName:"ol"},"You can pass your lifespan handler to FastKafka app on\ninitialisation by passing a ",(0,i.kt)("inlineCode",{parentName:"li"},"lifespan")," argument")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0ff0556c.0b867a3c.js b/assets/js/0ff0556c.0b867a3c.js new file mode 100644 index 0000000..c79dd87 --- /dev/null +++ b/assets/js/0ff0556c.0b867a3c.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7624],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>d});var r=a(7294);function n(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,r)}return a}function i(e){for(var t=1;t=0||(n[a]=e[a]);return n}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(n[a]=e[a])}return n}var s=r.createContext({}),p=function(e){var t=r.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},k=function(e){var t=p(e.components);return r.createElement(s.Provider,{value:t},e.children)},c="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var a=e.components,n=e.mdxType,o=e.originalType,s=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(a),u=n,d=c["".concat(s,".").concat(u)]||c[u]||f[u]||o;return a?r.createElement(d,i(i({ref:t},k),{},{components:a})):r.createElement(d,i({ref:t},k))}));function d(e,t){var a=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var o=a.length,i=new Array(o);i[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:n,i[1]=l;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var r=a(7462),n=(a(7294),a(3905));const o={},i=void 0,l={unversionedId:"api/fastkafka/testing/ApacheKafkaBroker",id:"version-0.7.0/api/fastkafka/testing/ApacheKafkaBroker",title:"ApacheKafkaBroker",description:"fastkafka.testing.ApacheKafkaBroker 
{fastkafka.testing.ApacheKafkaBroker}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/testing/ApacheKafkaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/ApacheKafkaBroker",permalink:"/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"SequentialExecutor",permalink:"/docs/0.7.0/api/fastkafka/executors/SequentialExecutor"},next:{title:"LocalRedpandaBroker",permalink:"/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker"}},s={},p=[{value:"fastkafka.testing.ApacheKafkaBroker",id:"fastkafka.testing.ApacheKafkaBroker",level:2},{value:"__init__",id:"init",level:3},{value:"get_service_config_string",id:"get_service_config_string",level:3},{value:"start",id:"start",level:3},{value:"stop",id:"stop",level:3}],k={toc:p},c="wrapper";function f(e){let{components:t,...a}=e;return(0,n.kt)(c,(0,r.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.testing.ApacheKafkaBroker"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.ApacheKafkaBroker")),(0,n.kt)("p",null,"ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing."),(0,n.kt)("h3",{id:"init"},(0,n.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None")),(0,n.kt)("p",null,"Initialises the ApacheKafkaBroker object"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to 
connect"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,n.kt)("h3",{id:"get_service_config_string"},(0,n.kt)("inlineCode",{parentName:"h3"},"get_service_config_string")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def get_service_config_string(self: fastkafka.testing.ApacheKafkaBroker, service: str, data_dir: pathlib.Path) -> str")),(0,n.kt)("p",null,"Gets the configuration string for a service."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"service"),': Name of the service ("kafka" or "zookeeper").'),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the service will save data.")),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"The service configuration string.")),(0,n.kt)("h3",{id:"start"},(0,n.kt)("inlineCode",{parentName:"h3"},"start")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def start(self: fastkafka.testing.ApacheKafkaBroker) -> str")),(0,n.kt)("p",null,"Starts a local Kafka broker and ZooKeeper instance synchronously."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"The Kafka broker bootstrap server address in string format: host:port.")),(0,n.kt)("h3",{id:"stop"},(0,n.kt)("inlineCode",{parentName:"h3"},"stop")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None")),(0,n.kt)("p",null,"Stops a local kafka broker and zookeeper instance synchronously"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of 
file diff --git a/assets/js/1030.7d064482.js b/assets/js/1030.7d064482.js new file mode 100644 index 0000000..978b064 --- /dev/null +++ b/assets/js/1030.7d064482.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1030],{1876:(e,t,n)=>{n.d(t,{Mt:()=>ne,Ol:()=>ee,Qd:()=>T,UQ:()=>j,on:()=>X});var r=n(7294);function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function b(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function y(e){var t=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(e){return!1}}();return function(){var n,r=p(e);if(t){var o=p(this).constructor;n=Reflect.construct(r,arguments,o)}else n=r.apply(this,arguments);return function(e,t){if(t&&("object"==typeof t||"function"==typeof t))return t;if(void 0!==t)throw new TypeError("Derived constructors may only return object or undefined");return b(e)}(this,n)}}function h(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null==n)return;var r,o,a=[],l=!0,i=!1;try{for(n=n.call(e);!(l=(r=n.next()).done)&&(a.push(r.value),!t||a.length!==t);l=!0);}catch(u){i=!0,o=u}finally{try{l||null==n.return||n.return()}finally{if(i)throw o}}return a}(e,t)||v(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn 
order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function g(e){return function(e){if(Array.isArray(e))return w(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||v(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function v(e,t){if(e){if("string"==typeof e)return w(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?w(e,t):void 0}}function w(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n{n.d(t,{Z:()=>l});var r=n(7294),o=n(7418),a=n.n(o);const l=({url:e,allowFullScreen:t,position:n,display:o,height:l,width:i,overflow:u,styles:c,onLoad:d,onMouseOver:s,onMouseOut:p,scrolling:f,id:m,frameBorder:b,ariaHidden:y,sandbox:h,allow:g,className:v,title:w,ariaLabel:E,ariaLabelledby:x,name:A,target:O,loading:I,importance:P,referrerpolicy:j,allowpaymentrequest:k,src:C,key:S})=>{const D=a()({src:C||e,target:O||null,style:{position:n||null,display:o||"initial",overflow:u||null},scrolling:f||null,allowpaymentrequest:k||null,importance:P||null,sandbox:h&&[...h].join(" ")||null,loading:I||null,styles:c||null,name:A||null,className:v||null,allowFullScreen:"allowFullScreen",referrerpolicy:j||null,title:w||null,allow:g||null,id:m||null,"aria-labelledby":x||null,"aria-hidden":y||null,"aria-label":E||null,width:i||null,height:l||null,onLoad:d||null,onMouseOver:s||null,onMouseOut:p||null,key:S||"iframe"});let M=Object.create(null);for(let r of Object.keys(D))null!=D[r]&&(M[r]=D[r]);for(let r of Object.keys(M.style))null==M.style[r]&&delete M.style[r];if(M.styles)for(let r of 
Object.keys(M.styles))M.styles.hasOwnProperty(r)&&(M.style[r]=M.styles[r]),Object.keys(M.styles).pop()==r&&delete M.styles;if(t)if("allow"in M){const e=M.allow.replace("fullscreen","");M.allow=`fullscreen ${e.trim()}`.trim()}else M.allow="fullscreen";return b>=0&&(M.style.hasOwnProperty("border")||(M.style.border=b)),r.createElement("iframe",Object.assign({},M))}}}]); \ No newline at end of file diff --git a/assets/js/10df9fdc.6033a07f.js b/assets/js/10df9fdc.6033a07f.js new file mode 100644 index 0000000..62c5a39 --- /dev/null +++ b/assets/js/10df9fdc.6033a07f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7368],{3905:(e,a,o)=>{o.d(a,{Zo:()=>c,kt:()=>d});var t=o(7294);function n(e,a,o){return a in e?Object.defineProperty(e,a,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[a]=o,e}function s(e,a){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),o.push.apply(o,t)}return o}function r(e){for(var a=1;a=0||(n[o]=e[o]);return n}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(n[o]=e[o])}return n}var i=t.createContext({}),p=function(e){var a=t.useContext(i),o=a;return e&&(o="function"==typeof e?e(a):r(r({},a),e)),o},c=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},k=t.forwardRef((function(e,a){var o=e.components,n=e.mdxType,s=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),m=p(o),k=n,d=m["".concat(i,".").concat(k)]||m[k]||u[k]||s;return o?t.createElement(d,r(r({ref:a},c),{},{components:o})):t.createElement(d,r({ref:a},c))}));function d(e,a){var o=arguments,n=a&&a.mdxType;if("string"==typeof e||n){var 
s=o.length,r=new Array(s);r[0]=k;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[m]="string"==typeof e?e:n,r[1]=l;for(var p=2;p{o.r(a),o.d(a,{assets:()=>i,contentTitle:()=>r,default:()=>u,frontMatter:()=>s,metadata:()=>l,toc:()=>p});var t=o(7462),n=(o(7294),o(3905));const s={},r="@consumes basics",l={unversionedId:"guides/Guide_11_Consumes_Basics",id:"guides/Guide_11_Consumes_Basics",title:"@consumes basics",description:"You can use @consumes decorator to consume messages from Kafka topics.",source:"@site/docs/guides/Guide_11_Consumes_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_11_Consumes_Basics",permalink:"/docs/next/guides/Guide_11_Consumes_Basics",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/next/"},next:{title:"Batch consuming",permalink:"/docs/next/guides/Guide_12_Batch_Consuming"}},i={},p=[{value:"Import FastKafka",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a consumer function and decorate it with @consumes",id:"create-a-consumer-function-and-decorate-it-with-consumes",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Send the message to kafka topic",id:"send-the-message-to-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2},{value:"Message metadata",id:"message-metadata",level:2},{value:"Create a consumer function with metadata",id:"create-a-consumer-function-with-metadata",level:3},{value:"Dealing with high latency consuming functions",id:"dealing-with-high-latency-consuming-functions",level:2}],c={toc:p},m="wrapper";function 
u(e){let{components:a,...o}=e;return(0,n.kt)(m,(0,t.Z)({},c,o,{components:a,mdxType:"MDXLayout"}),(0,n.kt)("h1",{id:"consumes-basics"},"@consumes basics"),(0,n.kt)("p",null,"You can use ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator to consume messages from Kafka topics."),(0,n.kt)("p",null,"In this guide we will create a simple FastKafka app that will consume\n",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages from hello_world topic."),(0,n.kt)("h2",{id:"import-fastkafka"},"Import ",(0,n.kt)("a",{parentName:"h2",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,n.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,n.kt)("p",null,"To use the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator, first we need to import the base\nFastKafka app to create our application."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,n.kt)("p",null,"In this demo we will log the messages to the output so that we can\ninspect and verify that our app is consuming properly. For that we need\nto import the logger."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n")),(0,n.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,n.kt)("p",null,"Next, you need to define the structure of the messages you want to\nconsume from the topic using ",(0,n.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". 
For\nthe guide we\u2019ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded."),(0,n.kt)("p",null,"Let\u2019s import ",(0,n.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,n.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,n.kt)("inlineCode",{parentName:"p"},"msg")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'class HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,n.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,n.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,n.kt)("inlineCode",{parentName:"p"},"")," and\n",(0,n.kt)("inlineCode",{parentName:"p"},"")," with the actual values of your\nKafka bootstrap server"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'kafka_brokers = {\n "demo_broker": {\n "url": "",\n "description": "local demo kafka broker",\n "port": "",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,n.kt)("h2",{id:"create-a-consumer-function-and-decorate-it-with-consumes"},"Create a consumer function and decorate it with ",(0,n.kt)("inlineCode",{parentName:"h2"},"@consumes")),(0,n.kt)("p",null,"Let\u2019s create a consumer function that will consume ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nfrom ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and log them."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: 
{msg}")\n')),(0,n.kt)("p",null,"The function decorated with the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will be called\nwhen a message is produced to Kafka."),(0,n.kt)("p",null,"The message will then be injected into the typed ",(0,n.kt)("em",{parentName:"p"},"msg")," argument of the\nfunction and its type will be used to parse the message."),(0,n.kt)("p",null,"In this example case, when the message is sent into a ",(0,n.kt)("em",{parentName:"p"},"hello_world"),"\ntopic, it will be parsed into a HelloWorld class and ",(0,n.kt)("inlineCode",{parentName:"p"},"on_hello_world"),"\nfunction will be called with the parsed class as ",(0,n.kt)("em",{parentName:"p"},"msg")," argument value."),(0,n.kt)("h2",{id:"final-app"},"Final app"),(0,n.kt)("p",null,"Your app code should look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "",\n "description": "local demo kafka broker",\n "port": "",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("h2",{id:"run-the-app"},"Run the app"),(0,n.kt)("p",null,"Now we can run the app. 
Copy the code above in consumer_example.py and\nrun it by running"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,n.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[14442]: 23-06-15 07:16:00.585 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\nStarting process cleanup, this may take a few seconds...\n23-06-15 07:16:04.626 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 14442...\n[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-15 07:16:05.853 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 14442 terminated.\n")),(0,n.kt)("h2",{id:"send-the-message-to-kafka-topic"},"Send the message to kafka topic"),(0,n.kt)("p",null,"Lets send a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},'echo { \\"msg\\": \\"Hello world\\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=\n')),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[15588]: 23-06-15 07:16:15.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[15588]: 23-06-15 07:16:15.294 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[15588]: 23-06-15 07:16:15.295 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[15588]: 23-06-15 07:16:15.295 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer subscribed.\n[15588]: 23-06-15 07:16:15.302 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \n[15588]: 23-06-15 07:16:25.867 [INFO] consumer_example: Got msg: msg='Hello world'\nStarting process cleanup, this may take a few seconds...\n23-06-15 07:16:34.168 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 15588...\n[15588]: 23-06-15 07:16:35.358 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[15588]: 23-06-15 07:16:35.359 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-15 07:16:35.475 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 15588 terminated.\n")),(0,n.kt)("p",null,"You should see the \u201cGot msg: msg='Hello world'\" being logged by your\nconsumer."),(0,n.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,n.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are\nreceiving the message from, this is because the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default \u201con","_",'" prefix from it\nand use the rest as the topic name. 
In this example case, the topic is\n',(0,n.kt)("em",{parentName:"p"},"hello_world"),"."),(0,n.kt)("p",null,"You can choose your custom prefix by defining the ",(0,n.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nconsumes decorator, like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(prefix="read_from_")\nasync def read_from_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,n.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in consumes decorator, like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(topic="my_special_topic")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("h2",{id:"message-data"},"Message data"),(0,n.kt)("p",null,"The message received from kafka is translated from binary JSON\nrepresentation int the class defined by typing of ",(0,n.kt)("em",{parentName:"p"},"msg")," parameter in the\nfunction decorated by the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator."),(0,n.kt)("p",null,"In this example case, the message will be parsed into a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld"),"\nclass."),(0,n.kt)("h2",{id:"message-metadata"},"Message metadata"),(0,n.kt)("p",null,"If you need any of Kafka message metadata such as timestamp, partition\nor headers you can access the metadata by adding a EventMetadata typed\nargument to your consumes function and the metadata from the incoming\nmessage will be automatically injected when calling the consumes\nfunction."),(0,n.kt)("p",null,"Let\u2019s demonstrate that."),(0,n.kt)("h3",{id:"create-a-consumer-function-with-metadata"},"Create a consumer function with metadata"),(0,n.kt)("p",null,"The only difference from the original basic consume function is that we\nare now passing the 
",(0,n.kt)("inlineCode",{parentName:"p"},"meta: EventMetadata")," argument to the function. The\n",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will register that and, when a message is\nconsumed, it will also pass the metadata to your function. Now you can\nuse the metadata in your consume function. Lets log it to see what it\ncontains."),(0,n.kt)("p",null,"First, we need to import the EventMetadata"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import EventMetadata\n")),(0,n.kt)("p",null,"Now we can add the ",(0,n.kt)("inlineCode",{parentName:"p"},"meta")," argument to our consuming function."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld, meta: EventMetadata):\n logger.info(f"Got metadata: {meta}")\n')),(0,n.kt)("p",null,"Your final app should look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka import EventMetadata\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\nkafka_brokers = {\n "demo_broker": {\n "url": "",\n "description": "local demo kafka broker",\n "port": "",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld, meta: EventMetadata):\n logger.info(f"Got metadata: {meta}")\n')),(0,n.kt)("p",null,"Now lets run the app and send a message to the broker to see the logged\nmessage metadata."),(0,n.kt)("p",null,"You should see a similar log as the one below and the metadata being\nlogged in your app."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop() starting...\n[20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[20050]: 23-06-15 07:18:55.682 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \n[20050]: 23-06-15 07:19:06.337 [INFO] consumer_example: Got metadata: EventMetadata(topic='hello_world', partition=0, offset=0, timestamp=1686813546255, timestamp_type=0, key=None, value=b'{ \"msg\": \"Hello world\" }', checksum=None, serialized_key_size=-1, serialized_value_size=24, headers=())\nStarting process cleanup, this may take a few seconds...\n23-06-15 07:19:14.547 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 20050...\n[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-15 07:19:15.742 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 20050 terminated.\n")),(0,n.kt)("p",null,"As you can see in the log, from the metadata you now have the\ninformation about the partition, offset, timestamp, key and 
headers.\n\ud83c\udf89"),(0,n.kt)("h2",{id:"dealing-with-high-latency-consuming-functions"},"Dealing with high latency consuming functions"),(0,n.kt)("p",null,"If your functions have high latency due to, for example, lengthy\ndatabase calls you will notice a big decrease in performance. This is\ndue to the issue of how the consumes decorator executes your consume\nfunctions when consuming events. By default, the consume function will\nrun the consuming funtions for one topic sequentially, this is the most\nstraightforward approach and results with the least amount of overhead."),(0,n.kt)("p",null,"But, to handle those high latency tasks and run them in parallel,\nFastKafka has a\n",(0,n.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/executors/DynamicTaskExecutor#fastkafka.executors.DynamicTaskExecutor"},(0,n.kt)("inlineCode",{parentName:"a"},"DynamicTaskExecutor")),"\nprepared for your consumers. This executor comes with additional\noverhead, so use it only when you need to handle high latency functions."),(0,n.kt)("p",null,"Lets demonstrate how to use it."),(0,n.kt)("p",null,"To your consumes decorator, add an ",(0,n.kt)("inlineCode",{parentName:"p"},"executor")," option and set it to\n",(0,n.kt)("inlineCode",{parentName:"p"},'"DynamicTaskExecutor"'),", this will enable the consumer to handle high\nlatency functions effectively."),(0,n.kt)("p",null,"Your consuming function should now look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(executor="DynamicTaskExecutor")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"And the complete app should now look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str 
= Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "",\n "description": "local demo kafka broker",\n "port": "",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(executor="DynamicTaskExecutor")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"You can now run your app using the CLI commands described in this guide."),(0,n.kt)("p",null,"Lets send a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},'echo { \\"msg\\": \\"Hello world\\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=\n')),(0,n.kt)("p",null,"You should see a similar log as the one below."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[21539]: 23-06-15 07:19:25.154 [INFO] 
aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \n[21539]: 23-06-15 07:19:35.512 [INFO] consumer_example: Got msg: msg='Hello world'\nStarting process cleanup, this may take a few seconds...\n23-06-15 07:19:44.023 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 21539...\n[21539]: 23-06-15 07:19:45.202 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[21539]: 23-06-15 07:19:45.203 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-15 07:19:45.313 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 21539 terminated.\n")),(0,n.kt)("p",null,"Inside the log, you should see the \u201cGot msg: msg='Hello world'\" being\nlogged by your consumer."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/111ae602.55a30aac.js b/assets/js/111ae602.55a30aac.js new file mode 100644 index 0000000..b13003f --- /dev/null +++ b/assets/js/111ae602.55a30aac.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4379],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},k=function(e){var t=l(e.components);return 
n.createElement(p.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,k=s(e,["components","mdxType","originalType","parentName"]),u=l(a),d=r,f=u["".concat(p,".").concat(d)]||u[d]||c[d]||o;return a?n.createElement(f,i(i({ref:t},k),{},{components:a})):n.createElement(f,i({ref:t},k))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=d;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[u]="string"==typeof e?e:r,i[1]=s;for(var l=2;l{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>c,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var n=a(7462),r=(a(7294),a(3905));const o={},i="Defining a partition key",s={unversionedId:"guides/Guide_22_Partition_Keys",id:"version-0.5.0/guides/Guide_22_Partition_Keys",title:"Defining a partition key",description:"Partition keys are used in Apache Kafka to determine which partition a",source:"@site/versioned_docs/version-0.5.0/guides/Guide_22_Partition_Keys.md",sourceDirName:"guides",slug:"/guides/Guide_22_Partition_Keys",permalink:"/docs/0.5.0/guides/Guide_22_Partition_Keys",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@produces basics",permalink:"/docs/0.5.0/guides/Guide_21_Produces_Basics"},next:{title:"Lifespan Events",permalink:"/docs/0.5.0/guides/Guide_05_Lifespan_Handler"}},p={},l=[{value:"Return a key from the producing function",id:"return-a-key-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic with the desired key",id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key",level:2}],k={toc:l},u="wrapper";function 
c(e){let{components:t,...a}=e;return(0,r.kt)(u,(0,n.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"defining-a-partition-key"},"Defining a partition key"),(0,r.kt)("p",null,"Partition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance."),(0,r.kt)("p",null,"You can define your partition keys when using the ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator,\nthis guide will demonstrate to you this feature."),(0,r.kt)("h2",{id:"return-a-key-from-the-producing-function"},"Return a key from the producing function"),(0,r.kt)("p",null,"To define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass and set the key value. Check the example below:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n')),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class with\nthe key defined as ",(0,r.kt)("em",{parentName:"p"},"my_key"),". 
So, we wrap the message and key into a\nKafkaEvent class and return it as such."),(0,r.kt)("p",null,"While generating the documentation, the\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass will be unwrapped and the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class will be documented in\nthe definition of message type, same way if you didn\u2019t use the key."),(0,r.kt)("p",null,'!!! info "Which key to choose?"'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n")),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("strong",{parentName:"p"},"@producer basics")," guide to return\nthe ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," with our key. 
The final app will look like this (make\nsure you replace the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "",\n "description": "local demo kafka broker",\n "port": "",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key"},"Check if the message was sent to the Kafka topic with the desired key"),(0,r.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic with the defined key. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=\n")),(0,r.kt)("p",null,"You should see the ",(0,r.kt)("em",{parentName:"p"},'my_key {\u201cmsg": \u201cHello world!"}')," messages in your\ntopic appearing, the ",(0,r.kt)("em",{parentName:"p"},"my_key")," part of the message is the key that we\ndefined in our producing function."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1128ab4d.708c7b33.js b/assets/js/1128ab4d.708c7b33.js new file mode 100644 index 0000000..d4759c1 --- /dev/null +++ b/assets/js/1128ab4d.708c7b33.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8952],{3905:(e,t,a)=>{a.d(t,{Zo:()=>m,kt:()=>N});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function d(e){for(var t=1;t=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var p=n.createContext({}),o=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):d(d({},t),e)),a},m=function(e){var t=o(e.components);return n.createElement(p.Provider,{value:t},e.children)},s="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,p=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),s=o(a),u=l,N=s["".concat(p,".").concat(u)]||s[u]||k[u]||r;return 
a?n.createElement(N,d(d({ref:t},m),{},{components:a})):n.createElement(N,d({ref:t},m))}));function N(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,d=new Array(r);d[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[s]="string"==typeof e?e:l,d[1]=i;for(var o=2;o{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>d,default:()=>k,frontMatter:()=>r,metadata:()=>i,toc:()=>o});var n=a(7462),l=(a(7294),a(3905));const r={},d=void 0,i={unversionedId:"api/fastkafka/encoder/AvroBase",id:"api/fastkafka/encoder/AvroBase",title:"AvroBase",description:"fastkafka.encoder.AvroBase {fastkafka.encoder.AvroBase}",source:"@site/docs/api/fastkafka/encoder/AvroBase.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/AvroBase",permalink:"/docs/next/api/fastkafka/encoder/AvroBase",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"KafkaEvent",permalink:"/docs/next/api/fastkafka/KafkaEvent"},next:{title:"avro_decoder",permalink:"/docs/next/api/fastkafka/encoder/avro_decoder"}},p={},o=[{value:"fastkafka.encoder.AvroBase",id:"fastkafka.encoder.AvroBase",level:2},{value:"init",id:"pydantic.main.BaseModel.init",level:3},{value:"avro_schema",id:"fastkafka._components.encoder.avro.AvroBase.avro_schema",level:3},{value:"avro_schema_for_pydantic_class",id:"fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_class",level:3},{value:"avro_schema_for_pydantic_object",id:"fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_object",level:3},{value:"construct",id:"pydantic.main.BaseModel.construct",level:3},{value:"copy",id:"pydantic.main.BaseModel.copy",level:3},{value:"dict",id:"pydantic.main.BaseModel.dict",level:3},{value:"from_orm",id:"pydantic.main.BaseModel.from_orm",level:3},{value:"json",id:"pydantic.main.BaseModel.json",level:3},{value:"model_computed_fields",id:"pydantic.main.BaseModel.model_computed_fields",level:3},{value:"model_construct",id
:"pydantic.main.BaseModel.model_construct",level:3},{value:"model_copy",id:"pydantic.main.BaseModel.model_copy",level:3},{value:"model_dump",id:"pydantic.main.BaseModel.model_dump",level:3},{value:"model_dump_json",id:"pydantic.main.BaseModel.model_dump_json",level:3},{value:"model_extra",id:"pydantic.main.BaseModel.model_extra",level:3},{value:"model_fields_set",id:"pydantic.main.BaseModel.model_fields_set",level:3},{value:"model_json_schema",id:"pydantic.main.BaseModel.model_json_schema",level:3},{value:"model_parametrized_name",id:"pydantic.main.BaseModel.model_parametrized_name",level:3},{value:"model_post_init",id:"pydantic.main.BaseModel.model_post_init",level:3},{value:"model_rebuild",id:"pydantic.main.BaseModel.model_rebuild",level:3},{value:"model_validate",id:"pydantic.main.BaseModel.model_validate",level:3},{value:"model_validate_json",id:"pydantic.main.BaseModel.model_validate_json",level:3},{value:"parse_file",id:"pydantic.main.BaseModel.parse_file",level:3},{value:"parse_obj",id:"pydantic.main.BaseModel.parse_obj",level:3},{value:"parse_raw",id:"pydantic.main.BaseModel.parse_raw",level:3},{value:"schema",id:"pydantic.main.BaseModel.schema",level:3},{value:"schema_json",id:"pydantic.main.BaseModel.schema_json",level:3},{value:"update_forward_refs",id:"pydantic.main.BaseModel.update_forward_refs",level:3},{value:"validate",id:"pydantic.main.BaseModel.validate",level:3}],m={toc:o},s="wrapper";function k(e){let{components:t,...a}=e;return(0,l.kt)(s,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"fastkafka.encoder.AvroBase"},"fastkafka.encoder.AvroBase"),(0,l.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/encoder/avro.py#L22-L235",class:"link-to-source",target:"_blank"},"View source"),(0,l.kt)("p",null,"This is base pydantic class that will add some 
methods"),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.init"},(0,l.kt)("strong",{parentName:"h3"},"init")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n __pydantic_self__, data\n)\n")),(0,l.kt)("p",null,"Create a new model by parsing and validating input data from keyword arguments."),(0,l.kt)("p",null,"Raises ","[",(0,l.kt)("inlineCode",{parentName:"p"},"ValidationError"),"][pydantic_core.ValidationError]"," if the input data cannot be\nvalidated to form a valid model."),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"__init__")," uses ",(0,l.kt)("inlineCode",{parentName:"p"},"__pydantic_self__")," instead of the more common ",(0,l.kt)("inlineCode",{parentName:"p"},"self")," for the first arg to\nallow ",(0,l.kt)("inlineCode",{parentName:"p"},"self")," as a field name."),(0,l.kt)("h3",{id:"fastkafka._components.encoder.avro.AvroBase.avro_schema"},"avro_schema"),(0,l.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/encoder/avro.py#L80-L99",class:"link-to-source",target:"_blank"},"View source"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\navro_schema(\n by_alias=True, namespace=None\n)\n")),(0,l.kt)("p",null,"Returns the Avro schema for the Pydantic 
class."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"bool")),(0,l.kt)("td",{parentName:"tr",align:null},"Generate schemas using aliases defined. Defaults to True."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"namespace")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,l.kt)("td",{parentName:"tr",align:null},"Optional namespace string for schema generation."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"The Avro schema for the 
model.")))),(0,l.kt)("h3",{id:"fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_class"},"avro_schema_for_pydantic_class"),(0,l.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/encoder/avro.py#L53-L77",class:"link-to-source",target:"_blank"},"View source"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\navro_schema_for_pydantic_class(\n pydantic_model, by_alias=True, namespace=None\n)\n")),(0,l.kt)("p",null,"Returns the Avro schema for the given Pydantic class."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"pydantic_model")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Type[pydantic.main.BaseModel]")),(0,l.kt)("td",{parentName:"tr",align:null},"The Pydantic class."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"bool")),(0,l.kt)("td",{parentName:"tr",align:null},"Generate schemas using aliases defined. 
Defaults to True."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"namespace")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,l.kt)("td",{parentName:"tr",align:null},"Optional namespace string for schema generation."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"The Avro schema for the model.")))),(0,l.kt)("h3",{id:"fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_object"},"avro_schema_for_pydantic_object"),(0,l.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/encoder/avro.py#L26-L50",class:"link-to-source",target:"_blank"},"View source"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\navro_schema_for_pydantic_object(\n pydantic_model, by_alias=True, namespace=None\n)\n")),(0,l.kt)("p",null,"Returns the Avro schema for the given Pydantic 
object."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"pydantic_model")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"BaseModel")),(0,l.kt)("td",{parentName:"tr",align:null},"The Pydantic object."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"bool")),(0,l.kt)("td",{parentName:"tr",align:null},"Generate schemas using aliases defined. 
Defaults to True."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"namespace")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,l.kt)("td",{parentName:"tr",align:null},"Optional namespace string for schema generation."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"The Avro schema for the model.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.construct"},"construct"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nconstruct(\n _fields_set=None, values\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.copy"},"copy"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"copy(\n self, include=None, exclude=None, update=None, deep=False\n)\n")),(0,l.kt)("p",null,"Returns a copy of the model."),(0,l.kt)("p",null,'!!! 
warning "Deprecated"\nThis method is now deprecated; use ',(0,l.kt)("inlineCode",{parentName:"p"},"model_copy")," instead."),(0,l.kt)("p",null,"If you need ",(0,l.kt)("inlineCode",{parentName:"p"},"include")," or ",(0,l.kt)("inlineCode",{parentName:"p"},"exclude"),", use:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"include")),(0,l.kt)("td",{parentName:"tr",align:null},"AbstractSetIntStr"),(0,l.kt)("td",{parentName:"tr",align:null},"MappingIntStrAny"),(0,l.kt)("td",{parentName:"tr",align:null},"None")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude")),(0,l.kt)("td",{parentName:"tr",align:null},"AbstractSetIntStr"),(0,l.kt)("td",{parentName:"tr",align:null},"MappingIntStrAny"),(0,l.kt)("td",{parentName:"tr",align:null},"None")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"update")),(0,l.kt)("td",{parentName:"tr",align:null},"`Dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None`"),(0,l.kt)("td",{parentName:"tr",align:null},"Optional dictionary of field-value pairs to override field valuesin the copied 
model.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"deep")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"If True, the values of fields that are Pydantic models will be deep copied."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"A copy of the model with included, excluded and updated fields as specified.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.dict"},"dict"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"dict(\n self,\n include=None,\n exclude=None,\n by_alias=False,\n exclude_unset=False,\n exclude_defaults=False,\n exclude_none=False,\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.from_orm"},"from_orm"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nfrom_orm(\n obj\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.json"},"json"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"json(\n self,\n include=None,\n exclude=None,\n by_alias=False,\n exclude_unset=False,\n exclude_defaults=False,\n exclude_none=False,\n encoder=PydanticUndefined,\n models_as_dict=PydanticUndefined,\n 
dumps_kwargs,\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_computed_fields"},"model_computed_fields"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@property\nmodel_computed_fields(\n self\n)\n")),(0,l.kt)("p",null,"Get the computed fields of this model instance."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"dict[str, ComputedFieldInfo]")),(0,l.kt)("td",{parentName:"tr",align:null},"A dictionary of computed field names and their corresponding ",(0,l.kt)("inlineCode",{parentName:"td"},"ComputedFieldInfo")," objects.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_construct"},"model_construct"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_construct(\n _fields_set=None, values\n)\n")),(0,l.kt)("p",null,"Creates a new instance of the ",(0,l.kt)("inlineCode",{parentName:"p"},"Model")," class with validated data."),(0,l.kt)("p",null,"Creates a new model setting ",(0,l.kt)("inlineCode",{parentName:"p"},"__dict__")," and ",(0,l.kt)("inlineCode",{parentName:"p"},"__pydantic_fields_set__")," from trusted or pre-validated data.\nDefault values are respected, but no other validation is performed.\nBehaves as if ",(0,l.kt)("inlineCode",{parentName:"p"},"Config.extra = 'allow'")," was set since it adds all passed 
values"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"_fields_set")),(0,l.kt)("td",{parentName:"tr",align:null},"set","[str]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"The set of field names accepted for the Model instance.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"values")),(0,l.kt)("td",{parentName:"tr",align:null},"Any"),(0,l.kt)("td",{parentName:"tr",align:null},"Trusted or pre-validated data dictionary."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"A new instance of the ",(0,l.kt)("inlineCode",{parentName:"td"},"Model")," class with validated data.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_copy"},"model_copy"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"model_copy(\n self, update=None, deep=False\n)\n")),(0,l.kt)("p",null,"Usage docs: 
",(0,l.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/2.2/usage/serialization/#model_copy"},"https://docs.pydantic.dev/2.2/usage/serialization/#model_copy")),(0,l.kt)("p",null,"Returns a copy of the model."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"update")),(0,l.kt)("td",{parentName:"tr",align:null},"dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Values to change/add in the new model. Note: the data is not validatedbefore creating the new model. 
You should trust this data.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"deep")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Set to ",(0,l.kt)("inlineCode",{parentName:"td"},"True")," to make a deep copy of the model."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"New model instance.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_dump"},"model_dump"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"model_dump(\n self,\n mode='python',\n include=None,\n exclude=None,\n by_alias=False,\n exclude_unset=False,\n exclude_defaults=False,\n exclude_none=False,\n round_trip=False,\n warnings=True,\n)\n")),(0,l.kt)("p",null,"Usage docs: ",(0,l.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump"},"https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump")),(0,l.kt)("p",null,"Generate a dictionary representation of the model, optionally specifying which fields to include or 
exclude."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"mode")),(0,l.kt)("td",{parentName:"tr",align:null},"Literal","['json', 'python']"),(0,l.kt)("td",{parentName:"tr",align:null},"str"),(0,l.kt)("td",{parentName:"tr",align:null},"The mode in which ",(0,l.kt)("inlineCode",{parentName:"td"},"to_python")," should run.If mode is 'json', the dictionary will only contain JSON serializable types.If mode is 'python', the dictionary may contain any Python objects.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"include")),(0,l.kt)("td",{parentName:"tr",align:null},"IncEx"),(0,l.kt)("td",{parentName:"tr",align:null},"A list of fields to include in the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude")),(0,l.kt)("td",{parentName:"tr",align:null},"IncEx"),(0,l.kt)("td",{parentName:"tr",align:null},"A list of fields to exclude from the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to use the field's alias in the dictionary key if 
defined."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_unset")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that are unset or None from the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_defaults")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that are set to their default value from the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_none")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that have a value of ",(0,l.kt)("inlineCode",{parentName:"td"},"None")," from the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"round_trip")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to enable serialization and deserialization round-trip support."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"warnings")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to log warnings when 
invalid fields are encountered."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"A dictionary representation of the model.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_dump_json"},"model_dump_json"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"model_dump_json(\n self,\n indent=None,\n include=None,\n exclude=None,\n by_alias=False,\n exclude_unset=False,\n exclude_defaults=False,\n exclude_none=False,\n round_trip=False,\n warnings=True,\n)\n")),(0,l.kt)("p",null,"Usage docs: ",(0,l.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json"},"https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json")),(0,l.kt)("p",null,"Generates a JSON representation of the model using Pydantic's ",(0,l.kt)("inlineCode",{parentName:"p"},"to_json")," 
method."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"indent")),(0,l.kt)("td",{parentName:"tr",align:null},"int"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Indentation to use in the JSON output. If None is passed, the output will be compact.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"include")),(0,l.kt)("td",{parentName:"tr",align:null},"IncEx"),(0,l.kt)("td",{parentName:"tr",align:null},"Field(s) to include in the JSON output. Can take either a string or set of strings."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude")),(0,l.kt)("td",{parentName:"tr",align:null},"IncEx"),(0,l.kt)("td",{parentName:"tr",align:null},"Field(s) to exclude from the JSON output. 
Can take either a string or set of strings."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to serialize using field aliases."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_unset")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that have not been explicitly set."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_defaults")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that have the default value."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_none")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that have a value of ",(0,l.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"round_trip")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to use serialization/deserialization between JSON and class 
instance."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"warnings")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to show any warnings that occurred during serialization."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"str")),(0,l.kt)("td",{parentName:"tr",align:null},"A JSON string representation of the model.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_extra"},"model_extra"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@property\nmodel_extra(\n self\n)\n")),(0,l.kt)("p",null,"Get extra fields set during validation."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"`dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None`")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_fields_set"},"model_fields_set"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@property\nmodel_fields_set(\n self\n)\n")),(0,l.kt)("p",null,"Returns the set of fields 
that have been set on this model instance."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"set[str]")),(0,l.kt)("td",{parentName:"tr",align:null},"A set of strings representing the fields that have been set,i.e. that were not filled from defaults.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_json_schema"},"model_json_schema"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_json_schema(\n by_alias=True,\n ref_template='#/$defs/{model}',\n schema_generator=,\n mode='validation',\n)\n")),(0,l.kt)("p",null,"Generates a JSON schema for a model class."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to use attribute aliases or 
not."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"ref_template")),(0,l.kt)("td",{parentName:"tr",align:null},"str"),(0,l.kt)("td",{parentName:"tr",align:null},"The reference template."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"'#/$defs/{model}'"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"schema_generator")),(0,l.kt)("td",{parentName:"tr",align:null},"type","[GenerateJsonSchema]"),(0,l.kt)("td",{parentName:"tr",align:null},"To override the logic used to generate the JSON schema, as a subclass of",(0,l.kt)("inlineCode",{parentName:"td"},"GenerateJsonSchema")," with your desired modifications"),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},""))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"mode")),(0,l.kt)("td",{parentName:"tr",align:null},"JsonSchemaMode"),(0,l.kt)("td",{parentName:"tr",align:null},"The mode in which to generate the schema."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"'validation'"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"The JSON schema for the given model 
class.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_parametrized_name"},"model_parametrized_name"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_parametrized_name(\n params\n)\n")),(0,l.kt)("p",null,"Compute the class name for parametrizations of generic classes."),(0,l.kt)("p",null,"This method can be overridden to achieve a custom naming scheme for generic BaseModels."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"params")),(0,l.kt)("td",{parentName:"tr",align:null},"tuple[type","[Any]",", ...]"),(0,l.kt)("td",{parentName:"tr",align:null},"Tuple of types of the class. 
Given a generic class",(0,l.kt)("inlineCode",{parentName:"td"},"Model")," with 2 type variables and a concrete model ",(0,l.kt)("inlineCode",{parentName:"td"},"Model[str, int]"),",the value ",(0,l.kt)("inlineCode",{parentName:"td"},"(str, int)")," would be passed to ",(0,l.kt)("inlineCode",{parentName:"td"},"params"),"."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"str")),(0,l.kt)("td",{parentName:"tr",align:null},"String representing the new class where ",(0,l.kt)("inlineCode",{parentName:"td"},"params")," are passed to ",(0,l.kt)("inlineCode",{parentName:"td"},"cls")," as type variables.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"TypeError")),(0,l.kt)("td",{parentName:"tr",align:null},"Raised when trying to generate concrete names for non-generic models.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_post_init"},"model_post_init"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"model_post_init(\n self, _BaseModel__context\n)\n")),(0,l.kt)("p",null,"Override this method to perform additional initialization after ",(0,l.kt)("inlineCode",{parentName:"p"},"__init__")," 
and ",(0,l.kt)("inlineCode",{parentName:"p"},"model_construct"),"."),(0,l.kt)("p",null,"This is useful if you want to do some validation that requires the entire model to be initialized."),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_rebuild"},"model_rebuild"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_rebuild(\n force=False,\n raise_errors=True,\n _parent_namespace_depth=2,\n _types_namespace=None,\n)\n")),(0,l.kt)("p",null,"Try to rebuild the pydantic-core schema for the model."),(0,l.kt)("p",null,"This may be necessary when one of the annotations is a ForwardRef which could not be resolved during\nthe initial attempt to build the schema, and automatic rebuilding fails."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"force")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to force the rebuilding of the model schema, defaults to ",(0,l.kt)("inlineCode",{parentName:"td"},"False"),"."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"raise_errors")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to raise errors, defaults to 
",(0,l.kt)("inlineCode",{parentName:"td"},"True"),"."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"_parent_namespace_depth")),(0,l.kt)("td",{parentName:"tr",align:null},"int"),(0,l.kt)("td",{parentName:"tr",align:null},"The depth level of the parent namespace, defaults to 2."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"2"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"_types_namespace")),(0,l.kt)("td",{parentName:"tr",align:null},"dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"The types namespace, defaults to ",(0,l.kt)("inlineCode",{parentName:"td"},"None"),".")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"`bool"),(0,l.kt)("td",{parentName:"tr",align:null},"None`")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_validate"},"model_validate"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_validate(\n obj, strict=None, from_attributes=None, context=None\n)\n")),(0,l.kt)("p",null,"Validate a pydantic model 
instance."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"obj")),(0,l.kt)("td",{parentName:"tr",align:null},"Any"),(0,l.kt)("td",{parentName:"tr",align:null},"The object to validate."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"strict")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to raise an exception on invalid fields.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"from_attributes")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to extract data from object attributes.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"context")),(0,l.kt)("td",{parentName:"tr",align:null},"dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Additional context to pass to the 
validator.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"The validated model instance.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"ValidationError")),(0,l.kt)("td",{parentName:"tr",align:null},"If the object could not be validated.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_validate_json"},"model_validate_json"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_validate_json(\n json_data, strict=None, context=None\n)\n")),(0,l.kt)("p",null,"Validate the given JSON data against the Pydantic 
model."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"json_data")),(0,l.kt)("td",{parentName:"tr",align:null},"str"),(0,l.kt)("td",{parentName:"tr",align:null},"bytes"),(0,l.kt)("td",{parentName:"tr",align:null},"bytearray")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"strict")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to enforce types strictly.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"context")),(0,l.kt)("td",{parentName:"tr",align:null},"dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Extra variables to pass to the validator.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"The validated Pydantic 
model.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,l.kt)("td",{parentName:"tr",align:null},"If ",(0,l.kt)("inlineCode",{parentName:"td"},"json_data")," is not a JSON string.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.parse_file"},"parse_file"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nparse_file(\n path,\n content_type=None,\n encoding='utf8',\n proto=None,\n allow_pickle=False,\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.parse_obj"},"parse_obj"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nparse_obj(\n obj\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.parse_raw"},"parse_raw"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nparse_raw(\n b,\n content_type=None,\n encoding='utf8',\n proto=None,\n allow_pickle=False,\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.schema"},"schema"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nschema(\n by_alias=True, ref_template='#/$defs/{model}'\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.schema_json"},"schema_json"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nschema_json(\n by_alias=True, ref_template='#/$defs/{model}', dumps_kwargs\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.update_forward_refs"},"update_forward_refs"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nupdate_forward_refs(\n 
localns\n)\n")),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.validate"},"validate"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nvalidate(\n value\n)\n")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1187a271.6b0af2cc.js b/assets/js/1187a271.6b0af2cc.js new file mode 100644 index 0000000..2f1690f --- /dev/null +++ b/assets/js/1187a271.6b0af2cc.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5198],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var l=n.createContext({}),p=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},c=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=p(a),k=r,f=d["".concat(l,".").concat(k)]||d[k]||u[k]||o;return a?n.createElement(f,s(s({ref:t},c),{},{components:a})):n.createElement(f,s({ref:t},c))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof 
e?e:r,s[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},s="Batch producing",i={unversionedId:"guides/Guide_23_Batch_Producing",id:"version-0.8.0/guides/Guide_23_Batch_Producing",title:"Batch producing",description:"If you want to send your data in batches @produces decorator makes",source:"@site/versioned_docs/version-0.8.0/guides/Guide_23_Batch_Producing.md",sourceDirName:"guides",slug:"/guides/Guide_23_Batch_Producing",permalink:"/docs/guides/Guide_23_Batch_Producing",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Defining a partition key",permalink:"/docs/guides/Guide_22_Partition_Keys"},next:{title:"Lifespan Events",permalink:"/docs/guides/Guide_05_Lifespan_Handler"}},l={},p=[{value:"Return a batch from the producing function",id:"return-a-batch-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the batch was sent to the Kafka topic with the defined key",id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key",level:2},{value:"Batch key",id:"batch-key",level:2},{value:"Check if the batch was sent to the Kafka topic",id:"check-if-the-batch-was-sent-to-the-kafka-topic",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"batch-producing"},"Batch producing"),(0,r.kt)("p",null,"If you want to send your data in batches ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator makes\nthat possible for you. 
By returning a ",(0,r.kt)("inlineCode",{parentName:"p"},"list")," of messages you want to\nsend in a batch the producer will collect the messages and send them in\na batch to a Kafka broker."),(0,r.kt)("p",null,"This guide will demonstrate how to use this feature."),(0,r.kt)("h2",{id:"return-a-batch-from-the-producing-function"},"Return a batch from the producing function"),(0,r.kt)("p",null,"To define a batch that you want to produce to Kafka topic, you need to\nreturn the ",(0,r.kt)("inlineCode",{parentName:"p"},"List")," of the messages that you want to be batched from your\nproducing function."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n")),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class batch\nthat is created from a list of msgs we passed into our producing\nfunction."),(0,r.kt)("p",null,'Lets also prepare a backgound task that will send a batch of \u201chello\nworld" messages when the app starts.'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n')),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_21_Produces_Basics"},"@producer\nbasics")," guide to return the\n",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," batch. 
The final app will look like this (make sure you\nreplace the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "",\n "description": "local demo kafka broker",\n "port": "",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\n[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key"},"Check if the batch was sent to the Kafka topic with the defined key"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=\n")),(0,r.kt)("p",null,"You should see the batch of messages in your topic."),(0,r.kt)("h2",{id:"batch-key"},"Batch key"),(0,r.kt)("p",null,"To define a key for your batch like in ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a partition\nkey")," guide you can wrap the\nreturning value in a\n",(0,r.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/KafkaEvent#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass. To learn more about defining a partition ke and\n",(0,r.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/KafkaEvent#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass, please, have a look at ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a partition\nkey")," guide."),(0,r.kt)("p",null,"Let\u2019s demonstrate that."),(0,r.kt)("p",null,"To define a key, we just need to modify our producing function, like\nthis:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("p",null,"Now our app looks like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "",\n "description": "local demo kafka broker",\n "port": "",\n }\n}\n\napp = 
FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic"},"Check if the batch was sent to the Kafka topic"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic, containing a defined key. In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=\n")),(0,r.kt)("p",null,"You should see the batch of messages with the defined key in your topic."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/11c86bb5.38ea69ae.js b/assets/js/11c86bb5.38ea69ae.js new file mode 100644 index 0000000..5e47e84 --- /dev/null +++ b/assets/js/11c86bb5.38ea69ae.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1612],{3905:(e,r,t)=>{t.d(r,{Zo:()=>p,kt:()=>k});var a=t(7294);function n(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function o(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);r&&(a=a.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,a)}return t}function c(e){for(var r=1;r=0||(n[t]=e[t]);return n}(e,r);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var s=a.createContext({}),f=function(e){var 
r=a.useContext(s),t=r;return e&&(t="function"==typeof e?e(r):c(c({},r),e)),t},p=function(e){var r=f(e.components);return a.createElement(s.Provider,{value:r},e.children)},l="mdxType",d={inlineCode:"code",wrapper:function(e){var r=e.children;return a.createElement(a.Fragment,{},r)}},u=a.forwardRef((function(e,r){var t=e.components,n=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),l=f(t),u=n,k=l["".concat(s,".").concat(u)]||l[u]||d[u]||o;return t?a.createElement(k,c(c({ref:r},p),{},{components:t})):a.createElement(k,c({ref:r},p))}));function k(e,r){var t=arguments,n=r&&r.mdxType;if("string"==typeof e||n){var o=t.length,c=new Array(o);c[0]=u;var i={};for(var s in r)hasOwnProperty.call(r,s)&&(i[s]=r[s]);i.originalType=e,i[l]="string"==typeof e?e:n,c[1]=i;for(var f=2;f{t.r(r),t.d(r,{assets:()=>s,contentTitle:()=>c,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>f});var a=t(7462),n=(t(7294),t(3905));const o={},c=void 0,i={unversionedId:"api/fastkafka/encoder/AvroBase",id:"version-0.7.1/api/fastkafka/encoder/AvroBase",title:"AvroBase",description:"fastkafka.encoder.AvroBase {fastkafka.encoder.AvroBase}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/AvroBase.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/AvroBase",permalink:"/docs/0.7.1/api/fastkafka/encoder/AvroBase",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"KafkaEvent",permalink:"/docs/0.7.1/api/fastkafka/KafkaEvent"},next:{title:"avro_decoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/avro_decoder"}},s={},f=[{value:"fastkafka.encoder.AvroBase",id:"fastkafka.encoder.AvroBase",level:2}],p={toc:f},l="wrapper";function d(e){let{components:r,...t}=e;return(0,n.kt)(l,(0,a.Z)({},p,t,{components:r,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.encoder.AvroBase"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.AvroBase")),(0,n.kt)("p",null,"This is base 
pydantic class that will add some methods"))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1244450e.79248efa.js b/assets/js/1244450e.79248efa.js new file mode 100644 index 0000000..6e43ca9 --- /dev/null +++ b/assets/js/1244450e.79248efa.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5805],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>m});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},k=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,k=o(e,["components","mdxType","originalType","parentName"]),f=p(a),d=l,m=f["".concat(i,".").concat(d)]||f[d]||u[d]||r;return a?n.createElement(m,s(s({ref:t},k),{},{components:a})):n.createElement(m,s({ref:t},k))}));function m(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,s=new Array(r);s[0]=d;var o={};for(var i in t)hasOwnProperty.call(t,i)&&(o[i]=t[i]);o.originalType=e,o[f]="string"==typeof e?e:l,s[1]=o;for(var p=2;p{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const 
r={},s="fastkafka",o={unversionedId:"cli/fastkafka",id:"version-0.7.0/cli/fastkafka",title:"fastkafka",description:"Usage:",source:"@site/versioned_docs/version-0.7.0/cli/fastkafka.md",sourceDirName:"cli",slug:"/cli/fastkafka",permalink:"/docs/0.7.0/cli/fastkafka",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Tester",permalink:"/docs/0.7.0/api/fastkafka/testing/Tester"},next:{title:"run_fastkafka_server_process",permalink:"/docs/0.7.0/cli/run_fastkafka_server_process"}},i={},p=[{value:"fastkafka docs",id:"fastkafka-docs",level:2},{value:"fastkafka docs generate",id:"fastkafka-docs-generate",level:3},{value:"fastkafka docs install_deps",id:"fastkafka-docs-install_deps",level:3},{value:"fastkafka docs serve",id:"fastkafka-docs-serve",level:3},{value:"fastkafka run",id:"fastkafka-run",level:2},{value:"fastkafka testing",id:"fastkafka-testing",level:2},{value:"fastkafka testing install_deps",id:"fastkafka-testing-install_deps",level:3}],k={toc:p},f="wrapper";function u(e){let{components:t,...a}=e;return(0,l.kt)(f,(0,n.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h1",{id:"fastkafka"},(0,l.kt)("inlineCode",{parentName:"h1"},"fastkafka")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and 
exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"docs"),": Commands for managing fastkafka app..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"run"),": Runs Fast Kafka API application"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"testing"),": Commands for managing fastkafka testing")),(0,l.kt)("h2",{id:"fastkafka-docs"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka docs")),(0,l.kt)("p",null,"Commands for managing fastkafka app documentation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"generate"),": Generates documentation for a FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"serve"),": Generates and serves documentation for a...")),(0,l.kt)("h3",{id:"fastkafka-docs-generate"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs generate")),(0,l.kt)("p",null,"Generates documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs generate [OPTIONS] 
APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created; default is current directory"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka documentation generation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-serve"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs serve")),(0,l.kt)("p",null,"Generates and serves documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs serve [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the 
form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created; default is current directory"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--bind TEXT"),": Some info ","[default: 127.0.0.1]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--port INTEGER"),": Some info ","[default: 8000]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-run"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka run")),(0,l.kt)("p",null,"Runs Fast Kafka API application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka run [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--num-workers INTEGER"),": Number of FastKafka instances to run, defaults to number of CPU cores. 
","[default: 4]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class. ","[default: localhost]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-testing"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka testing")),(0,l.kt)("p",null,"Commands for managing fastkafka testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka app...")),(0,l.kt)("h3",{id:"fastkafka-testing-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka testing install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka app testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/13bdfbad.a7bd1580.js b/assets/js/13bdfbad.a7bd1580.js new file mode 100644 index 0000000..9c169aa --- /dev/null +++ b/assets/js/13bdfbad.a7bd1580.js @@ -0,0 
+1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5896],{3905:(e,t,a)=>{a.d(t,{Zo:()=>f,kt:()=>d});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},f=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},u="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,p=e.parentName,f=s(e,["components","mdxType","originalType","parentName"]),u=l(a),c=o,d=u["".concat(p,".").concat(c)]||u[c]||k[c]||r;return a?n.createElement(d,i(i({ref:t},f),{},{components:a})):n.createElement(d,i({ref:t},f))}));function d(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=c;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[u]="string"==typeof e?e:o,i[1]=s;for(var l=2;l{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>k,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var n=a(7462),o=(a(7294),a(3905));const r={},i="Deploy FastKafka docs to GitHub Pages",s={unversionedId:"guides/Guide_04_Github_Actions_Workflow",id:"version-0.8.0/guides/Guide_04_Github_Actions_Workflow",title:"Deploy FastKafka docs to GitHub Pages",description:"Getting 
started",source:"@site/versioned_docs/version-0.8.0/guides/Guide_04_Github_Actions_Workflow.md",sourceDirName:"guides",slug:"/guides/Guide_04_Github_Actions_Workflow",permalink:"/docs/guides/Guide_04_Github_Actions_Workflow",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using Redpanda to test FastKafka",permalink:"/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka"},next:{title:"Deploying FastKafka using Docker",permalink:"/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka"}},p={},l=[{value:"Getting started",id:"getting-started",level:2},{value:"Options",id:"options",level:2},{value:"Set app location",id:"set-app-location",level:3},{value:"Example Repository",id:"example-repository",level:2}],f={toc:l},u="wrapper";function k(e){let{components:t,...a}=e;return(0,o.kt)(u,(0,n.Z)({},f,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"deploy-fastkafka-docs-to-github-pages"},"Deploy FastKafka docs to GitHub Pages"),(0,o.kt)("h2",{id:"getting-started"},"Getting started"),(0,o.kt)("p",null,"Add your workflow file ",(0,o.kt)("inlineCode",{parentName:"p"},".github/workflows/fastkafka_docs_deploy.yml")," and\npush it to your remote default branch."),(0,o.kt)("p",null,"Here is an example workflow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'name: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n push:\n branches: [ "main", "master" ]\n workflow_dispatch:\n\njobs:\n deploy:\n runs-on: ubuntu-latest\n permissions:\n contents: write\n steps:\n - uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("h2",{id:"options"},"Options"),(0,o.kt)("h3",{id:"set-app-location"},"Set app location"),(0,o.kt)("p",null,"Input in the form of ",(0,o.kt)("inlineCode",{parentName:"p"},"path:app"),", where ",(0,o.kt)("inlineCode",{parentName:"p"},"path")," is the path to a Python\nfile and 
",(0,o.kt)("inlineCode",{parentName:"p"},"app")," is an object of type\n",(0,o.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'- name: Deploy\n uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("p",null,"In the above example,\n",(0,o.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is named as ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_app")," and it is available in the ",(0,o.kt)("inlineCode",{parentName:"p"},"application"),"\nsubmodule of the ",(0,o.kt)("inlineCode",{parentName:"p"},"test_fastkafka")," module."),(0,o.kt)("h2",{id:"example-repository"},"Example Repository"),(0,o.kt)("p",null,"A\n",(0,o.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\nlibrary that uses the above-mentioned workfow actions to publish\nFastKafka docs to ",(0,o.kt)("inlineCode",{parentName:"p"},"Github Pages")," can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml"},"here"),"."))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/14056c2c.8964dc73.js b/assets/js/14056c2c.8964dc73.js new file mode 100644 index 0000000..4bb3cf5 --- /dev/null +++ b/assets/js/14056c2c.8964dc73.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6133],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),u=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(l.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(n),k=o,d=p["".concat(l,".").concat(k)]||p[k]||f[k]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var u=2;u{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const a={},i="Intro",s={unversionedId:"guides/Guide_01_Intro",id:"version-0.7.0/guides/Guide_01_Intro",title:"Intro",description:"This tutorial will show you how to use FastKafkaAPI, step by",source:"@site/versioned_docs/version-0.7.0/guides/Guide_01_Intro.md",sourceDirName:"guides",slug:"/guides/Guide_01_Intro",permalink:"/docs/0.7.0/guides/Guide_01_Intro",draft:!1,tags:[],version:"0.7.0",frontMatter:{}},l={},u=[{value:"Installing FastKafkaAPI",id:"installing-fastkafkaapi",level:2},{value:"Preparing a Kafka broker",id:"preparing-a-kafka-broker",level:2},{value:"Running the 
code",id:"running-the-code",level:2}],c={toc:u},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"intro"},"Intro"),(0,o.kt)("p",null,"This tutorial will show you how to use ",(0,o.kt)("b",null,"FastKafkaAPI"),", step by\nstep."),(0,o.kt)("p",null,"The goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel."),(0,o.kt)("p",null,"In this Intro tutorial we\u2019ll go trough the basic requirements to run the\ndemos presented in future steps."),(0,o.kt)("h2",{id:"installing-fastkafkaapi"},"Installing FastKafkaAPI"),(0,o.kt)("p",null,"First step is to install FastKafkaAPI"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ pip install fastkafka\n")),(0,o.kt)("h2",{id:"preparing-a-kafka-broker"},"Preparing a Kafka broker"),(0,o.kt)("p",null,"Next step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication."),(0,o.kt)("p",null,'!!! info "Hey, your first info!"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n")),(0,o.kt)("p",null,"To go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times)."),(0,o.kt)("p",null,'!!! warning "Listen! This is important."'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'To be able to setup this configuration you need to have Docker and docker-compose installed\n\nSee here for more info on Docker and docker compose\n')),(0,o.kt)("p",null,"To setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g.\xa0fastkafka_demo). 
Inside the new\nfolder create a new YAML file named ",(0,o.kt)("b",null,"kafka_demo.yml")," and copy the\nfollowing configuration into it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'version: "3"\nservices:\n zookeeper:\n image: wurstmeister/zookeeper\n hostname: zookeeper\n container_name: zookeeper\n networks:\n - fastkafka-network\n ports:\n - "2181:2181"\n - "22:22"\n - "2888:2888"\n - "3888:3888"\n kafka:\n image: wurstmeister/kafka\n container_name: kafka\n ports:\n - "9093:9093"\n environment:\n HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d\' \' -f 2"\n KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"\n KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n KAFKA_CREATE_TOPICS: "hello:1:1"\n volumes:\n - /var/run/docker.sock:/var/run/docker.sock\n depends_on:\n - zookeeper\n healthcheck:\n test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]\n interval: 5s\n timeout: 10s\n retries: 5\n networks:\n - fastkafka-network\nnetworks:\n fastkafka-network:\n name: "fastkafka-network"\n')),(0,o.kt)("p",null,"This configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a \u2018hello\u2019 topic (quite enough for a\nstart). To start the configuration, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ docker-compose -f kafka_demo.yaml up -d --wait\n")),(0,o.kt)("p",null,"This will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! 
\ud83c\udf8a"),(0,o.kt)("h2",{id:"running-the-code"},"Running the code"),(0,o.kt)("p",null,"After installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the \u2018First Steps\u2019 part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem."),(0,o.kt)("p",null,"You are highly encouraged to follow along the tutorials not just by\nreading trough them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/14111b0c.d1701b86.js b/assets/js/14111b0c.d1701b86.js new file mode 100644 index 0000000..1d54a52 --- /dev/null +++ b/assets/js/14111b0c.d1701b86.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1856],{3905:(a,e,t)=>{t.d(e,{Zo:()=>f,kt:()=>d});var n=t(7294);function i(a,e,t){return e in a?Object.defineProperty(a,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):a[e]=t,a}function s(a,e){var t=Object.keys(a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(a);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),t.push.apply(t,n)}return t}function o(a){for(var e=1;e=0||(i[t]=a[t]);return i}(a,e);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(a);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(a,t)&&(i[t]=a[t])}return i}var p=n.createContext({}),l=function(a){var e=n.useContext(p),t=e;return a&&(t="function"==typeof a?a(e):o(o({},e),a)),t},f=function(a){var e=l(a.components);return n.createElement(p.Provider,{value:e},a.children)},k="mdxType",c={inlineCode:"code",wrapper:function(a){var e=a.children;return n.createElement(n.Fragment,{},e)}},u=n.forwardRef((function(a,e){var 
t=a.components,i=a.mdxType,s=a.originalType,p=a.parentName,f=r(a,["components","mdxType","originalType","parentName"]),k=l(t),u=i,d=k["".concat(p,".").concat(u)]||k[u]||c[u]||s;return t?n.createElement(d,o(o({ref:e},f),{},{components:t})):n.createElement(d,o({ref:e},f))}));function d(a,e){var t=arguments,i=e&&e.mdxType;if("string"==typeof a||i){var s=t.length,o=new Array(s);o[0]=u;var r={};for(var p in e)hasOwnProperty.call(e,p)&&(r[p]=e[p]);r.originalType=a,r[k]="string"==typeof a?a:i,o[1]=r;for(var l=2;l{t.r(e),t.d(e,{assets:()=>p,contentTitle:()=>o,default:()=>c,frontMatter:()=>s,metadata:()=>r,toc:()=>l});var n=t(7462),i=(t(7294),t(3905));const s={},o="Using FastAPI to Run FastKafka Application",r={unversionedId:"guides/Guide_32_Using_fastapi_to_run_fastkafka_application",id:"guides/Guide_32_Using_fastapi_to_run_fastkafka_application",title:"Using FastAPI to Run FastKafka Application",description:"When deploying a FastKafka application, the default approach is to",source:"@site/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",sourceDirName:"guides",slug:"/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",permalink:"/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploying FastKafka using Docker",permalink:"/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka"},next:{title:"Benchmarking FastKafka app",permalink:"/docs/next/guides/Guide_06_Benchmarking_FastKafka"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"1. Basic FastKafka app",id:"1-basic-fastkafka-app",level:2},{value:"2. 
Using fastapi_lifespan method",id:"2-using-fastapi_lifespan-method",level:2},{value:"Putting it all together",id:"putting-it-all-together",level:2}],f={toc:l},k="wrapper";function c(a){let{components:e,...t}=a;return(0,i.kt)(k,(0,n.Z)({},f,t,{components:e,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-fastapi-to-run-fastkafka-application"},"Using FastAPI to Run FastKafka Application"),(0,i.kt)("p",null,"When deploying a FastKafka application, the default approach is to\nutilize the ",(0,i.kt)("a",{parentName:"p",href:"/docs/cli/fastkafka#fastkafka-run"},(0,i.kt)("inlineCode",{parentName:"a"},"fastkafka run"))," CLI\ncommand. This command allows you to launch your FastKafka application as\na standalone service. However, if you already have a FastAPI application\nin place and wish to run FastKafka application alongside it, you have an\nalternative option."),(0,i.kt)("p",null,"FastKafka provides a method called\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nthat leverages ",(0,i.kt)("a",{parentName:"p",href:"https://fastapi.tiangolo.com/advanced/events/#lifespan-events"},"FastAPI\u2019s\nlifespan"),"\nfeature. This method allows you to run your FastKafka application\ntogether with your existing FastAPI app, seamlessly integrating their\nfunctionalities. 
By using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod, you can start the FastKafka application within the same process\nas the FastAPI app."),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod ensures that both FastAPI and FastKafka are initialized and start\nworking simultaneously. This approach enables the execution of\nKafka-related tasks, such as producing and consuming messages, while\nalso handling HTTP requests through FastAPI\u2019s routes."),(0,i.kt)("p",null,"By combining FastAPI and FastKafka in this manner, you can build a\ncomprehensive application that harnesses the power of both frameworks.\nWhether you require real-time messaging capabilities or traditional HTTP\nendpoints, this approach allows you to leverage the strengths of FastAPI\nand FastKafka within a single deployment setup."),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A basic knowledge of\n",(0,i.kt)("a",{parentName:"li",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nis needed to proceed with this guide. 
If you are not familiar with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),",\nplease go through the ",(0,i.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("a",{parentName:"li",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nand ",(0,i.kt)("inlineCode",{parentName:"li"},"FastAPI")," libraries needs to be installed.")),(0,i.kt)("p",null,"This guide will provide a step-by-step explanation, taking you through\neach stage individually, before combining all the components in the\nfinal section for a comprehensive understanding of the process."),(0,i.kt)("h2",{id:"1-basic-fastkafka-app"},"1. Basic FastKafka app"),(0,i.kt)("p",null,"In this step, we will begin by creating a simple FastKafka application."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Greetings",\n kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n return greeting\n')),(0,i.kt)("p",null,"In the above example, we consume messages from a topic called ",(0,i.kt)("inlineCode",{parentName:"p"},"names"),',\nwe prepend \u201cHello" to the message, and send it back to another 
topic\ncalled ',(0,i.kt)("inlineCode",{parentName:"p"},"greetings"),"."),(0,i.kt)("p",null,"We now have a simple\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp to produce and consume from two topics."),(0,i.kt)("h2",{id:"2-using-fastapi_lifespan-method"},"2. Using fastapi_lifespan method"),(0,i.kt)("p",null,"In this step of the guide, we will explore the integration of a\nFastKafka application with a FastAPI application using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod. The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod is a feature provided by FastKafka, which allows you to\nseamlessly integrate a FastKafka application with a FastAPI application\nby leveraging FastAPI\u2019s lifespan feature."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name="localhost"))\n\n\n@fastapi_app.get("/hello")\nasync def hello():\n return {"msg": "hello there"}\n')),(0,i.kt)("p",null,"In the above example, a new instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"FastAPI")," app is created,\nand when the app is started using uvicorn, it also runs the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napplication concurrently."),(0,i.kt)("h2",{id:"putting-it-all-together"},"Putting it all together"),(0,i.kt)("p",null,"Let\u2019s put the above code together and write it in a file 
called\n",(0,i.kt)("inlineCode",{parentName:"p"},"fast_apps.py"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "fast_apps.py" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Greetings",\n kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n return greeting\n\n\nfrom fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan("localhost"))\n\n@fastapi_app.get("/hello")\nasync def hello():\n return {"msg": "hello there"}\n')),(0,i.kt)("p",null,"Finally, you can run the FastAPI application using a web server of your\nchoice, such as Uvicorn or Hypercorn by running the below command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"uvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080\n")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/14f7f42b.27993dc9.js b/assets/js/14f7f42b.27993dc9.js new file mode 100644 index 0000000..8bc1453 --- /dev/null +++ b/assets/js/14f7f42b.27993dc9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5547],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>u});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var 
a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=s(a),m=i,u=d["".concat(p,".").concat(m)]||d[m]||k[m]||r;return a?n.createElement(u,o(o({ref:t},c),{},{components:a})):n.createElement(u,o({ref:t},c))}));function u(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=a.length,o=new Array(r);o[0]=m;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[d]="string"==typeof e?e:i,o[1]=l;for(var s=2;s{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>s});var n=a(7462),i=(a(7294),a(3905));const r={},o="Deploying FastKafka using Docker",l={unversionedId:"guides/Guide_30_Using_docker_to_deploy_fastkafka",id:"version-0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka",title:"Deploying FastKafka using Docker",description:"Building a Docker 
Image",source:"@site/versioned_docs/version-0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_30_Using_docker_to_deploy_fastkafka",permalink:"/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow"},next:{title:"Using FastAPI to Run FastKafka Application",permalink:"/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application"}},p={},s=[{value:"Building a Docker Image",id:"building-a-docker-image",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Creating requirements.txt file",id:"creating-requirementstxt-file",level:3},{value:"Creating Dockerfile",id:"creating-dockerfile",level:3},{value:"Build the Docker Image",id:"build-the-docker-image",level:3},{value:"Start the Docker Container",id:"start-the-docker-container",level:3},{value:"Additional Security",id:"additional-security",level:2},{value:"Example repo",id:"example-repo",level:2}],c={toc:s},d="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"deploying-fastkafka-using-docker"},"Deploying FastKafka using Docker"),(0,i.kt)("h2",{id:"building-a-docker-image"},"Building a Docker Image"),(0,i.kt)("p",null,"To build a Docker image for a FastKafka project, we need the following\nitems:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library that is built using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"A file in which the requirements are specified. 
This could be a\nrequirements.txt file, a setup.py file, or even a wheel file."),(0,i.kt)("li",{parentName:"ol"},"A Dockerfile to build an image that will include the two files\nmentioned above.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka"),"-based application and write it to the\n",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the ",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n 
"description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"creating-requirementstxt-file"},"Creating requirements.txt file"),(0,i.kt)("p",null,"The above code only requires ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka"),". So, we will add only\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka")," to the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file, but you can add additional\nrequirements to it as well."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"fastkafka>=0.3.0\n")),(0,i.kt)("p",null,"Here we are using ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," to store the project\u2019s\ndependencies. However, other methods like ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv"),", and\n",(0,i.kt)("inlineCode",{parentName:"p"},"wheel")," files can also be used. ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py")," is commonly used for\npackaging and distributing Python modules, while ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv")," is a tool used\nfor managing virtual environments and package dependencies. 
",(0,i.kt)("inlineCode",{parentName:"p"},"wheel"),"\nfiles are built distributions of Python packages that can be installed\nwith pip."),(0,i.kt)("h3",{id:"creating-dockerfile"},"Creating Dockerfile"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-dockerfile"},'# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]\n')),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Start from the official Python base image.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the current working directory to ",(0,i.kt)("inlineCode",{parentName:"p"},"/project"),"."),(0,i.kt)("p",{parentName:"li"},"This is where we\u2019ll put the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file and the\n",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Copy the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file inside\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install the package dependencies in the requirements file."),(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"--no-cache-dir")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to not save the downloaded\npackages locally, as that is only if ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," was going to be run again\nto install the same packages, but that\u2019s not the case when working\nwith containers."),(0,i.kt)("p",{parentName:"li"},"The 
",(0,i.kt)("inlineCode",{parentName:"p"},"--upgrade")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to upgrade the packages if they\nare already installed.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the ",(0,i.kt)("strong",{parentName:"p"},"command")," to run the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"p"},"CMD")," takes a list of strings, each of these strings is what you\nwould type in the command line separated by spaces."),(0,i.kt)("p",{parentName:"li"},"This command will be run from the ",(0,i.kt)("strong",{parentName:"p"},"current working directory"),", the\nsame ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory you set above with ",(0,i.kt)("inlineCode",{parentName:"p"},"WORKDIR /project"),"."),(0,i.kt)("p",{parentName:"li"},"We supply additional parameters ",(0,i.kt)("inlineCode",{parentName:"p"},"--num-workers")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"--kafka-broker"),"\nfor the run command. 
Finally, we specify the location of our\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka")," application location as a command argument."),(0,i.kt)("p",{parentName:"li"},"To learn more about ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command please check the ",(0,i.kt)("a",{parentName:"p",href:"../../cli/fastkafka/#fastkafka-run"},"CLI\ndocs"),"."))),(0,i.kt)("h3",{id:"build-the-docker-image"},"Build the Docker Image"),(0,i.kt)("p",null,"Now that all the files are in place, let\u2019s build the container image."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Go to the project directory (where your ",(0,i.kt)("inlineCode",{parentName:"p"},"Dockerfile")," is, containing\nyour ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file).")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to build the image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker build -t fastkafka_project_image .\n")),(0,i.kt)("p",{parentName:"li"},"This command will create a docker image with the name\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and the ",(0,i.kt)("inlineCode",{parentName:"p"},"latest")," tag."))),(0,i.kt)("p",null,"That\u2019s it! You have now built a docker image for your FastKafka project."),(0,i.kt)("h3",{id:"start-the-docker-container"},"Start the Docker Container"),(0,i.kt)("p",null,"Run a container based on the built image:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker run -d --name fastkafka_project_container fastkafka_project_image\n")),(0,i.kt)("h2",{id:"additional-security"},"Additional Security"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Trivy")," is an open-source tool that scans Docker images for\nvulnerabilities. 
It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. Here\u2019s\nhow you can use ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image"),":"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," on your local machine by following the instructions\nprovided in the ",(0,i.kt)("a",{parentName:"p",href:"https://aquasecurity.github.io/trivy/latest/getting-started/installation/"},"official ",(0,i.kt)("inlineCode",{parentName:"a"},"trivy"),"\ndocumentation"),".")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to scan your fastkafka_project_image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"trivy image fastkafka_project_image\n")),(0,i.kt)("p",{parentName:"li"},"This command will scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," for any\nvulnerabilities and provide you with a report of its findings.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Fix any vulnerabilities identified by ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy"),". 
You can do this by\nupdating the vulnerable package to a more secure version or by using\na different package altogether.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Rebuild your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and repeat steps 2 and 3\nuntil ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," reports no vulnerabilities."))),(0,i.kt)("p",null,"By using ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," based library which uses above mentioned Dockerfile to\nbuild a docker image can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/"},"here")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/15aa5f44.44966933.js b/assets/js/15aa5f44.44966933.js new file mode 100644 index 0000000..5db744a --- /dev/null +++ b/assets/js/15aa5f44.44966933.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3051],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},c=function(e){var 
t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),f=p(r),k=a,m=f["".concat(l,".").concat(k)]||f[k]||u[k]||o;return r?n.createElement(m,s(s({ref:t},c),{},{components:r})):n.createElement(m,s({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[f]="string"==typeof e?e:a,s[1]=i;for(var p=2;p{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=r(7462),a=(r(7294),r(3905));const o={},s="run_fastkafka_server_process",i={unversionedId:"cli/run_fastkafka_server_process",id:"version-0.7.1/cli/run_fastkafka_server_process",title:"run_fastkafka_server_process",description:"Usage:",source:"@site/versioned_docs/version-0.7.1/cli/run_fastkafka_server_process.md",sourceDirName:"cli",slug:"/cli/run_fastkafka_server_process",permalink:"/docs/0.7.1/cli/run_fastkafka_server_process",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"fastkafka",permalink:"/docs/0.7.1/cli/fastkafka"},next:{title:"LICENSE",permalink:"/docs/0.7.1/LICENSE"}},l={},p=[],c={toc:p},f="wrapper";function u(e){let{components:t,...r}=e;return(0,a.kt)(f,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"run_fastkafka_server_process"},(0,a.kt)("inlineCode",{parentName:"h1"},"run_fastkafka_server_process")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Usage"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-console"},"$ run_fastkafka_server_process [OPTIONS] 
APP\n")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"APP"),": Input in the form of 'path:app', where ",(0,a.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,a.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,a.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Options"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. ","[required]"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/15f1310d.015cd27f.js b/assets/js/15f1310d.015cd27f.js new file mode 100644 index 0000000..fddcd74 --- /dev/null +++ b/assets/js/15f1310d.015cd27f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5997],{3905:(a,e,t)=>{t.d(e,{Zo:()=>c,kt:()=>f});var n=t(7294);function o(a,e,t){return e in a?Object.defineProperty(a,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):a[e]=t,a}function s(a,e){var t=Object.keys(a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(a);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),t.push.apply(t,n)}return t}function r(a){for(var e=1;e=0||(o[t]=a[t]);return 
o}(a,e);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(a);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(a,t)&&(o[t]=a[t])}return o}var p=n.createContext({}),l=function(a){var e=n.useContext(p),t=e;return a&&(t="function"==typeof a?a(e):r(r({},e),a)),t},c=function(a){var e=l(a.components);return n.createElement(p.Provider,{value:e},a.children)},k="mdxType",u={inlineCode:"code",wrapper:function(a){var e=a.children;return n.createElement(n.Fragment,{},e)}},d=n.forwardRef((function(a,e){var t=a.components,o=a.mdxType,s=a.originalType,p=a.parentName,c=i(a,["components","mdxType","originalType","parentName"]),k=l(t),d=o,f=k["".concat(p,".").concat(d)]||k[d]||u[d]||s;return t?n.createElement(f,r(r({ref:e},c),{},{components:t})):n.createElement(f,r({ref:e},c))}));function f(a,e){var t=arguments,o=e&&e.mdxType;if("string"==typeof a||o){var s=t.length,r=new Array(s);r[0]=d;var i={};for(var p in e)hasOwnProperty.call(e,p)&&(i[p]=e[p]);i.originalType=a,i[k]="string"==typeof a?a:o,r[1]=i;for(var l=2;l{t.r(e),t.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>u,frontMatter:()=>s,metadata:()=>i,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},r="FastKafka",i={unversionedId:"index",id:"version-0.7.0/index",title:"FastKafka",description:"Effortless Kafka integration for your web services",source:"@site/versioned_docs/version-0.7.0/index.md",sourceDirName:".",slug:"/",permalink:"/docs/0.7.0/",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",next:{title:"@consumes basics",permalink:"/docs/0.7.0/guides/Guide_11_Consumes_Basics"}},p={},l=[{value:"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50",id:"-stay-in-touch-",level:4},{value:"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d",id:"-we-were-busy-lately-",level:4},{value:"Install",id:"install",level:2},{value:"Tutorial",id:"tutorial",level:2},{value:"Writing server 
code",id:"writing-server-code",level:2},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2},{value:"License",id:"license",level:2}],c={toc:l},k="wrapper";function u(a){let{components:e,...t}=a;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:e,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka"},"FastKafka"),(0,o.kt)("b",null,"Effortless Kafka integration for your web services"),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/v/fastkafka.png",alt:"PyPI"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/dm/fastkafka.png",alt:"PyPI -\nDownloads"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/pyversions/fastkafka.png",alt:"PyPI - Python\nVersion"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml",alt:"GitHub Workflow\nStatus"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg",alt:"CodeQL"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg",alt:"Dependency\nReview"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/license/airtai/fastkafka.png",alt:"GitHub"})),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-stay-in-touch-"},"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50"),(0,o.kt)("p",null,"Please show your support and stay in touch by:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"giving our ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/"},"GitHub repository")," a\nstar, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"joining our ",(0,o.kt)("a",{parentName:"p",href:"https://discord.gg/CJWmYpyFbc"},"Discord server"),"."))),(0,o.kt)("p",null,"Your support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. 
Thank you for your\nsupport!"),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-we-were-busy-lately-"},"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg",alt:"Activity",title:"Repobeats analytics image"})),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install base version of ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka")," with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("p",null,"To install fastkafka with testing features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test]\n")),(0,o.kt)("p",null,"To install fastkafka with asyncapi docs please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[docs]\n")),(0,o.kt)("p",null,"To install fastkafka with all the features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test,docs]\n")),(0,o.kt)("h2",{id:"tutorial"},"Tutorial"),(0,o.kt)("p",null,"You can start an interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/index.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open in Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"To demonstrate FastKafka simplicity of using ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes"),"\ndecorators, we will focus on a simple 
app."),(0,o.kt)("p",null,"The app will consume jsons containig positive floats from one topic, log\nthem and then produce incremented values to another topic."),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines one ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," mesage class. This Class will model the\nconsumed and produced data in our app demo, it contains one\n",(0,o.kt)("inlineCode",{parentName:"p"},"NonNegativeFloat")," field ",(0,o.kt)("inlineCode",{parentName:"p"},"data"),' that will be logged and \u201cprocessed"\nbefore being produced to another topic.'),(0,o.kt)("p",null,"These message class will be used to parse and validate incoming data in\nKafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass Data(BaseModel):\n data: NonNegativeFloat = Field(\n ..., example=0.5, description="Float data example"\n )\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. 
Each entry specifies the URL,\nport, and other details of a Kafka broker. This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker."),(0,o.kt)("p",null,"Next, an object of the ",(0,o.kt)("inlineCode",{parentName:"p"},"FastKafka")," class is initialized with the minimum\nset of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("p",null,"We will also import and create a logger so that we can log the incoming\ndata in our consuming function."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from logging import getLogger\nfrom fastkafka import FastKafka\n\nlogger = getLogger("Demo Kafka app")\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," message class. Specifying the type of the\nsingle argument is instructing the Pydantic to use ",(0,o.kt)("inlineCode",{parentName:"p"},"Data.parse_raw()"),"\non the consumed message before passing it to the user defined function\n",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_output_data"),' function,\nwhich specifies that this function should produce a message to the\n\u201coutput_data" Kafka topic whenever it is called. The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_output_data"),"\nfunction takes a single float argument ",(0,o.kt)("inlineCode",{parentName:"p"},"data"),". 
It it increments the\ndata returns it wrapped in a ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," object. The framework will call\nthe ",(0,o.kt)("inlineCode",{parentName:"p"},'Data.json().encode("utf-8")')," function on the returned value and\nproduce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: Data):\n logger.info(f"Got data: {msg.data}")\n await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic="output_data")\nasync def to_output_data(data: float) -> Data:\n processed_data = Data(data=data+1.0)\n return processed_data\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the ",(0,o.kt)("inlineCode",{parentName:"p"},"Tester")," instances which internally\nstarts InMemory implementation of Kafka broker."),(0,o.kt)("p",null,"The Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.testing import Tester\n\nmsg = Data(\n data=0.1,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send Data message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with incremented data in output_data topic\n await tester.awaited_mocks.on_output_data.assert_awaited_with(\n Data(data=1.1), timeout=2\n )\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] 
fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: 
['output_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] Demo Kafka app: Got data: 0.1\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a simple fastkafka application. 
The app will consume the\n",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," from the ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic, log it and produce the incremented\ndata to ",(0,o.kt)("inlineCode",{parentName:"p"},"output_data")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purposes")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent Data message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed service has reacted to Data\nmessage"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin faskafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass Data(BaseModel):\n data: NonNegativeFloat = Field(\n ..., example=0.5, description="Float data example"\n )\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n 
kafka_brokers=kafka_brokers,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: Data):\n logger.info(f"Got data: {msg.data}")\n await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic="output_data")\nasync def to_output_data(data: float) -> Data:\n processed_data = Data(data=data+1.0)\n return processed_data\n')),(0,o.kt)("p",null,"To run the service, use the FastKafka CLI command and pass the module\n(in this case, the file where the app implementation is located) and the\napp simbol to the command."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see the following output in your\ncommand line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1504]: 23-05-31 11:36:45.956 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1506]: 23-05-31 11:36:46.042 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n[1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]\n[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]\n[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...\n23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...\n[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.\n23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"AsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs 
install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation programatically you just need to call the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.\n23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /content/asyncapi/docs.\n")),(0,o.kt)("p",null,"This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all your\ndocumentation will be saved. You can check out the content of it with:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxr-xr-x 4 root root 4096 May 31 11:38 docs\ndrwxr-xr-x 2 root root 4096 May 31 11:38 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: \'/content/asyncapi/spec/asyncapi.yml\'\n23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at \'asyncapi/docs\'\n23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of \'$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write\'\n\nDone! 
\u2728\nCheck out your shiny new generated files at /content/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n127.0.0.1 - - [31/May/2023 11:39:14] "GET / HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/global.min.css HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /js/asyncapi-ui.min.js HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/asyncapi.min.css HTTP/1.1" 200 -\nInterupting serving of documentation and cleaning up...\n')),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," 
are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})),(0,o.kt)("h2",{id:"license"},"License"),(0,o.kt)("p",null,"FastKafka is licensed under the Apache License 2.0"),(0,o.kt)("p",null,"A permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code."),(0,o.kt)("p",null,"The full text of the license can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE"},"here"),"."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1674a630.97d72d03.js b/assets/js/1674a630.97d72d03.js new file mode 100644 index 0000000..ca10d8e --- /dev/null +++ b/assets/js/1674a630.97d72d03.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5252],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>u});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=s(e.components);return 
n.createElement(p.Provider,{value:t},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=s(a),m=i,u=d["".concat(p,".").concat(m)]||d[m]||k[m]||r;return a?n.createElement(u,o(o({ref:t},c),{},{components:a})):n.createElement(u,o({ref:t},c))}));function u(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=a.length,o=new Array(r);o[0]=m;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[d]="string"==typeof e?e:i,o[1]=l;for(var s=2;s{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>s});var n=a(7462),i=(a(7294),a(3905));const r={},o="Deploying FastKafka using Docker",l={unversionedId:"guides/Guide_30_Using_docker_to_deploy_fastkafka",id:"version-0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka",title:"Deploying FastKafka using Docker",description:"Building a Docker Image",source:"@site/versioned_docs/version-0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_30_Using_docker_to_deploy_fastkafka",permalink:"/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow"},next:{title:"Benchmarking FastKafka app",permalink:"/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka"}},p={},s=[{value:"Building a Docker Image",id:"building-a-docker-image",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Creating requirements.txt file",id:"creating-requirementstxt-file",level:3},{value:"Creating Dockerfile",id:"creating-dockerfile",level:3},{value:"Build the Docker 
Image",id:"build-the-docker-image",level:3},{value:"Start the Docker Container",id:"start-the-docker-container",level:3},{value:"Additional Security",id:"additional-security",level:2},{value:"Example repo",id:"example-repo",level:2}],c={toc:s},d="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"deploying-fastkafka-using-docker"},"Deploying FastKafka using Docker"),(0,i.kt)("h2",{id:"building-a-docker-image"},"Building a Docker Image"),(0,i.kt)("p",null,"To build a Docker image for a FastKafka project, we need the following\nitems:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library that is built using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"A file in which the requirements are specified. This could be a\nrequirements.txt file, a setup.py file, or even a wheel file."),(0,i.kt)("li",{parentName:"ol"},"A Dockerfile to build an image that will include the two files\nmentioned above.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up 
the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"creating-requirementstxt-file"},"Creating requirements.txt file"),(0,i.kt)("p",null,"The above code only requires ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka"),". 
So, we will add only\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka")," to the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file, but you can add additional\nrequirements to it as well."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"fastkafka>=0.3.0\n")),(0,i.kt)("p",null,"Here we are using ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," to store the project\u2019s\ndependencies. However, other methods like ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv"),", and\n",(0,i.kt)("inlineCode",{parentName:"p"},"wheel")," files can also be used. ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py")," is commonly used for\npackaging and distributing Python modules, while ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv")," is a tool used\nfor managing virtual environments and package dependencies. ",(0,i.kt)("inlineCode",{parentName:"p"},"wheel"),"\nfiles are built distributions of Python packages that can be installed\nwith pip."),(0,i.kt)("h3",{id:"creating-dockerfile"},"Creating Dockerfile"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-dockerfile"},'# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]\n')),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Start from the official Python base image.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the current working directory to ",(0,i.kt)("inlineCode",{parentName:"p"},"/project"),"."),(0,i.kt)("p",{parentName:"li"},"This is where we\u2019ll put the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file and 
the\n",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Copy the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file inside\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install the package dependencies in the requirements file."),(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"--no-cache-dir")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to not save the downloaded\npackages locally, as that is only if ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," was going to be run again\nto install the same packages, but that\u2019s not the case when working\nwith containers."),(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"--upgrade")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to upgrade the packages if they\nare already installed.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the ",(0,i.kt)("strong",{parentName:"p"},"command")," to run the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"p"},"CMD")," takes a list of strings, each of these strings is what you\nwould type in the command line separated by spaces."),(0,i.kt)("p",{parentName:"li"},"This command will be run from the ",(0,i.kt)("strong",{parentName:"p"},"current working directory"),", the\nsame ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory you set above with ",(0,i.kt)("inlineCode",{parentName:"p"},"WORKDIR /project"),"."),(0,i.kt)("p",{parentName:"li"},"We supply additional parameters ",(0,i.kt)("inlineCode",{parentName:"p"},"--num-workers")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"--kafka-broker"),"\nfor the run 
command. Finally, we specify the location of our\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka")," application location as a command argument."),(0,i.kt)("p",{parentName:"li"},"To learn more about ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command please check the ",(0,i.kt)("a",{parentName:"p",href:"../../cli/fastkafka/#fastkafka-run"},"CLI\ndocs"),"."))),(0,i.kt)("h3",{id:"build-the-docker-image"},"Build the Docker Image"),(0,i.kt)("p",null,"Now that all the files are in place, let\u2019s build the container image."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Go to the project directory (where your ",(0,i.kt)("inlineCode",{parentName:"p"},"Dockerfile")," is, containing\nyour ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file).")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to build the image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker build -t fastkafka_project_image .\n")),(0,i.kt)("p",{parentName:"li"},"This command will create a docker image with the name\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and the ",(0,i.kt)("inlineCode",{parentName:"p"},"latest")," tag."))),(0,i.kt)("p",null,"That\u2019s it! You have now built a docker image for your FastKafka project."),(0,i.kt)("h3",{id:"start-the-docker-container"},"Start the Docker Container"),(0,i.kt)("p",null,"Run a container based on the built image:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker run -d --name fastkafka_project_container fastkafka_project_image\n")),(0,i.kt)("h2",{id:"additional-security"},"Additional Security"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Trivy")," is an open-source tool that scans Docker images for\nvulnerabilities. 
It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. Here\u2019s\nhow you can use ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image"),":"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," on your local machine by following the instructions\nprovided in the ",(0,i.kt)("a",{parentName:"p",href:"https://aquasecurity.github.io/trivy/latest/getting-started/installation/"},"official ",(0,i.kt)("inlineCode",{parentName:"a"},"trivy"),"\ndocumentation"),".")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to scan your fastkafka_project_image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"trivy image fastkafka_project_image\n")),(0,i.kt)("p",{parentName:"li"},"This command will scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," for any\nvulnerabilities and provide you with a report of its findings.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Fix any vulnerabilities identified by ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy"),". 
You can do this by\nupdating the vulnerable package to a more secure version or by using\na different package altogether.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Rebuild your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and repeat steps 2 and 3\nuntil ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," reports no vulnerabilities."))),(0,i.kt)("p",null,"By using ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nbased library which uses above mentioned Dockerfile to build a docker\nimage can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/"},"here")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/16e87abe.02193372.js b/assets/js/16e87abe.02193372.js new file mode 100644 index 0000000..7549ac5 --- /dev/null +++ b/assets/js/16e87abe.02193372.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7132],{3905:(t,e,a)=>{a.d(e,{Zo:()=>k,kt:()=>d});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function o(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function l(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var s=n.createContext({}),c=function(t){var e=n.useContext(s),a=e;return 
t&&(a="function"==typeof t?t(e):l(l({},e),t)),a},k=function(t){var e=c(t.components);return n.createElement(s.Provider,{value:e},t.children)},u="mdxType",p={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},m=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,o=t.originalType,s=t.parentName,k=i(t,["components","mdxType","originalType","parentName"]),u=c(a),m=r,d=u["".concat(s,".").concat(m)]||u[m]||p[m]||o;return a?n.createElement(d,l(l({ref:e},k),{},{components:a})):n.createElement(d,l({ref:e},k))}));function d(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var o=a.length,l=new Array(o);l[0]=m;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=t,i[u]="string"==typeof t?t:r,l[1]=i;for(var c=2;c{a.r(e),a.d(e,{assets:()=>s,contentTitle:()=>l,default:()=>p,frontMatter:()=>o,metadata:()=>i,toc:()=>c});var n=a(7462),r=(a(7294),a(3905));const o={},l=void 0,i={unversionedId:"api/fastkafka/executors/DynamicTaskExecutor",id:"version-0.8.0/api/fastkafka/executors/DynamicTaskExecutor",title:"DynamicTaskExecutor",description:"fastkafka.executors.DynamicTaskExecutor 
{fastkafka.executors.DynamicTaskExecutor}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/executors/DynamicTaskExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/DynamicTaskExecutor",permalink:"/docs/api/fastkafka/executors/DynamicTaskExecutor",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_encoder",permalink:"/docs/api/fastkafka/encoder/json_encoder"},next:{title:"SequentialExecutor",permalink:"/docs/api/fastkafka/executors/SequentialExecutor"}},s={},c=[{value:"fastkafka.executors.DynamicTaskExecutor",id:"fastkafka.executors.DynamicTaskExecutor",level:2},{value:"init",id:"fastkafka._components.task_streaming.DynamicTaskExecutor.init",level:3},{value:"run",id:"fastkafka._components.task_streaming.DynamicTaskExecutor.run",level:3}],k={toc:c},u="wrapper";function p(t){let{components:e,...a}=t;return(0,r.kt)(u,(0,n.Z)({},k,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.executors.DynamicTaskExecutor"},"fastkafka.executors.DynamicTaskExecutor"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L207-L272",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"A class that implements a dynamic task executor for processing consumer records."),(0,r.kt)("p",null,"The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\nfor running a tasks in parallel using asyncio.Task."),(0,r.kt)("h3",{id:"fastkafka._components.task_streaming.DynamicTaskExecutor.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L214-L237",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self, throw_exceptions=False, max_buffer_size=100000, size=100000\n)\n")),(0,r.kt)("p",null,"Create 
an instance of DynamicTaskExecutor"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"throw_exceptions")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"Flag indicating whether exceptions should be thrown ot logged.Defaults to False."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_buffer_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum buffer size for the memory object stream.Defaults to 100_000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Size of the task pool. 
Defaults to 100_000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100000"))))),(0,r.kt)("h3",{id:"fastkafka._components.task_streaming.DynamicTaskExecutor.run"},"run"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L239-L272",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"run(\n self, is_shutting_down_f, generator, processor\n)\n")),(0,r.kt)("p",null,"Runs the dynamic task executor."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"is_shutting_down_f")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[], bool]")),(0,r.kt)("td",{parentName:"tr",align:null},"Function to check if the executor is shutting down."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"generator")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Generator function for retrieving consumer 
records."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"processor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Processor function for processing consumer records."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))))}p.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/17896441.8961bab4.js b/assets/js/17896441.8961bab4.js new file mode 100644 index 0000000..7dc8735 --- /dev/null +++ b/assets/js/17896441.8961bab4.js @@ -0,0 +1 @@ +(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7918],{3905:(e,t,n)=>{"use strict";n.d(t,{Zo:()=>d,kt:()=>f});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=a.createContext({}),i=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=i(e.components);return a.createElement(s.Provider,{value:t},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},p=a.forwardRef((function(e,t){var 
n=e.components,o=e.mdxType,r=e.originalType,s=e.parentName,d=c(e,["components","mdxType","originalType","parentName"]),m=i(n),p=o,f=m["".concat(s,".").concat(p)]||m[p]||u[p]||r;return n?a.createElement(f,l(l({ref:t},d),{},{components:n})):a.createElement(f,l({ref:t},d))}));function f(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,l=new Array(r);l[0]=p;var c={};for(var s in t)hasOwnProperty.call(t,s)&&(c[s]=t[s]);c.originalType=e,c[m]="string"==typeof e?e:o,l[1]=c;for(var i=2;i{"use strict";n.r(t),n.d(t,{default:()=>_t});var a=n(7294),o=n(833),r=n(902);const l=a.createContext(null);function c(e){let{children:t,content:n}=e;const o=function(e){return(0,a.useMemo)((()=>({metadata:e.metadata,frontMatter:e.frontMatter,assets:e.assets,contentTitle:e.contentTitle,toc:e.toc})),[e])}(n);return a.createElement(l.Provider,{value:o},t)}function s(){const e=(0,a.useContext)(l);if(null===e)throw new r.i6("DocProvider");return e}function i(){const{metadata:e,frontMatter:t,assets:n}=s();return a.createElement(o.d,{title:e.title,description:e.description,keywords:t.keywords,image:n.image??t.image})}var d=n(6010),m=n(7524),u=n(7462),p=n(5999),f=n(9960);function h(e){const{permalink:t,title:n,subLabel:o,isNext:r}=e;return a.createElement(f.Z,{className:(0,d.Z)("pagination-nav__link",r?"pagination-nav__link--next":"pagination-nav__link--prev"),to:t},o&&a.createElement("div",{className:"pagination-nav__sublabel"},o),a.createElement("div",{className:"pagination-nav__label"},n))}function g(e){const{previous:t,next:n}=e;return a.createElement("nav",{className:"pagination-nav docusaurus-mt-lg","aria-label":(0,p.I)({id:"theme.docs.paginator.navAriaLabel",message:"Docs pages navigation",description:"The ARIA label for the docs pagination"})},t&&a.createElement(h,(0,u.Z)({},t,{subLabel:a.createElement(p.Z,{id:"theme.docs.paginator.previous",description:"The label used to navigate to the previous 
doc"},"Previous")})),n&&a.createElement(h,(0,u.Z)({},n,{subLabel:a.createElement(p.Z,{id:"theme.docs.paginator.next",description:"The label used to navigate to the next doc"},"Next"),isNext:!0})))}function b(){const{metadata:e}=s();return a.createElement(g,{previous:e.previous,next:e.next})}var v=n(2263),E=n(143),y=n(5281),k=n(373),N=n(4477);const C={unreleased:function(e){let{siteTitle:t,versionMetadata:n}=e;return a.createElement(p.Z,{id:"theme.docs.versions.unreleasedVersionLabel",description:"The label used to tell the user that he's browsing an unreleased doc version",values:{siteTitle:t,versionLabel:a.createElement("b",null,n.label)}},"This is unreleased documentation for {siteTitle} {versionLabel} version.")},unmaintained:function(e){let{siteTitle:t,versionMetadata:n}=e;return a.createElement(p.Z,{id:"theme.docs.versions.unmaintainedVersionLabel",description:"The label used to tell the user that he's browsing an unmaintained doc version",values:{siteTitle:t,versionLabel:a.createElement("b",null,n.label)}},"This is documentation for {siteTitle} {versionLabel}, which is no longer actively maintained.")}};function L(e){const t=C[e.versionMetadata.banner];return a.createElement(t,e)}function T(e){let{versionLabel:t,to:n,onClick:o}=e;return a.createElement(p.Z,{id:"theme.docs.versions.latestVersionSuggestionLabel",description:"The label used to tell the user to check the latest version",values:{versionLabel:t,latestVersionLink:a.createElement("b",null,a.createElement(f.Z,{to:n,onClick:o},a.createElement(p.Z,{id:"theme.docs.versions.latestVersionLinkLabel",description:"The label used for the latest version suggestion link label"},"latest version")))}},"For up-to-date documentation, see the {latestVersionLink} ({versionLabel}).")}function 
w(e){let{className:t,versionMetadata:n}=e;const{siteConfig:{title:o}}=(0,v.Z)(),{pluginId:r}=(0,E.gA)({failfast:!0}),{savePreferredVersionName:l}=(0,k.J)(r),{latestDocSuggestion:c,latestVersionSuggestion:s}=(0,E.Jo)(r),i=c??(m=s).docs.find((e=>e.id===m.mainDocId));var m;return a.createElement("div",{className:(0,d.Z)(t,y.k.docs.docVersionBanner,"alert alert--warning margin-bottom--md"),role:"alert"},a.createElement("div",null,a.createElement(L,{siteTitle:o,versionMetadata:n})),a.createElement("div",{className:"margin-top--md"},a.createElement(T,{versionLabel:s.label,to:i.path,onClick:()=>l(s.name)})))}function _(e){let{className:t}=e;const n=(0,N.E)();return n.banner?a.createElement(w,{className:t,versionMetadata:n}):null}function x(e){let{className:t}=e;const n=(0,N.E)();return n.badge?a.createElement("span",{className:(0,d.Z)(t,y.k.docs.docVersionBadge,"badge badge--secondary")},a.createElement(p.Z,{id:"theme.docs.versionBadge.label",values:{versionLabel:n.label}},"Version: {versionLabel}")):null}function B(e){let{lastUpdatedAt:t,formattedLastUpdatedAt:n}=e;return a.createElement(p.Z,{id:"theme.lastUpdated.atDate",description:"The words used to describe on which date a page has been last updated",values:{date:a.createElement("b",null,a.createElement("time",{dateTime:new Date(1e3*t).toISOString()},n))}}," on {date}")}function Z(e){let{lastUpdatedBy:t}=e;return a.createElement(p.Z,{id:"theme.lastUpdated.byUser",description:"The words used to describe by who the page has been last updated",values:{user:a.createElement("b",null,t)}}," by {user}")}function O(e){let{lastUpdatedAt:t,formattedLastUpdatedAt:n,lastUpdatedBy:o}=e;return a.createElement("span",{className:y.k.common.lastUpdated},a.createElement(p.Z,{id:"theme.lastUpdated.lastUpdatedAtBy",description:"The sentence used to display when a page has been last updated, and by 
who",values:{atDate:t&&n?a.createElement(B,{lastUpdatedAt:t,formattedLastUpdatedAt:n}):"",byUser:o?a.createElement(Z,{lastUpdatedBy:o}):""}},"Last updated{atDate}{byUser}"),!1)}const H={iconEdit:"iconEdit_Z9Sw"};function A(e){let{className:t,...n}=e;return a.createElement("svg",(0,u.Z)({fill:"currentColor",height:"20",width:"20",viewBox:"0 0 40 40",className:(0,d.Z)(H.iconEdit,t),"aria-hidden":"true"},n),a.createElement("g",null,a.createElement("path",{d:"m34.5 11.7l-3 3.1-6.3-6.3 3.1-3q0.5-0.5 1.2-0.5t1.1 0.5l3.9 3.9q0.5 0.4 0.5 1.1t-0.5 1.2z m-29.5 17.1l18.4-18.5 6.3 6.3-18.4 18.4h-6.3v-6.2z"})))}function j(e){let{editUrl:t}=e;return a.createElement("a",{href:t,target:"_blank",rel:"noreferrer noopener",className:y.k.common.editThisPage},a.createElement(A,null),a.createElement(p.Z,{id:"theme.common.editThisPage",description:"The link label to edit the current page"},"Edit this page"))}const S={tag:"tag_zVej",tagRegular:"tagRegular_sFm0",tagWithCount:"tagWithCount_h2kH"};function I(e){let{permalink:t,label:n,count:o}=e;return a.createElement(f.Z,{href:t,className:(0,d.Z)(S.tag,o?S.tagWithCount:S.tagRegular)},n,o&&a.createElement("span",null,o))}const M={tags:"tags_jXut",tag:"tag_QGVx"};function P(e){let{tags:t}=e;return a.createElement(a.Fragment,null,a.createElement("b",null,a.createElement(p.Z,{id:"theme.tags.tagsListLabel",description:"The label alongside a tag list"},"Tags:")),a.createElement("ul",{className:(0,d.Z)(M.tags,"padding--none","margin-left--sm")},t.map((e=>{let{label:t,permalink:n}=e;return a.createElement("li",{key:n,className:M.tag},a.createElement(I,{label:t,permalink:n}))}))))}const U={lastUpdated:"lastUpdated_vwxv"};function z(e){return a.createElement("div",{className:(0,d.Z)(y.k.docs.docFooterTagsRow,"row margin-bottom--sm")},a.createElement("div",{className:"col"},a.createElement(P,e)))}function V(e){let{editUrl:t,lastUpdatedAt:n,lastUpdatedBy:o,formattedLastUpdatedAt:r}=e;return 
a.createElement("div",{className:(0,d.Z)(y.k.docs.docFooterEditMetaRow,"row")},a.createElement("div",{className:"col"},t&&a.createElement(j,{editUrl:t})),a.createElement("div",{className:(0,d.Z)("col",U.lastUpdated)},(n||o)&&a.createElement(O,{lastUpdatedAt:n,formattedLastUpdatedAt:r,lastUpdatedBy:o})))}function D(){const{metadata:e}=s(),{editUrl:t,lastUpdatedAt:n,formattedLastUpdatedAt:o,lastUpdatedBy:r,tags:l}=e,c=l.length>0,i=!!(t||n||r);return c||i?a.createElement("footer",{className:(0,d.Z)(y.k.docs.docFooter,"docusaurus-mt-lg")},c&&a.createElement(z,{tags:l}),i&&a.createElement(V,{editUrl:t,lastUpdatedAt:n,lastUpdatedBy:r,formattedLastUpdatedAt:o})):null}var R=n(6043),W=n(6668);function $(e){const t=e.map((e=>({...e,parentIndex:-1,children:[]}))),n=Array(7).fill(-1);t.forEach(((e,t)=>{const a=n.slice(2,e.level);e.parentIndex=Math.max(...a),n[e.level]=t}));const a=[];return t.forEach((e=>{const{parentIndex:n,...o}=e;n>=0?t[n].children.push(o):a.push(o)})),a}function F(e){let{toc:t,minHeadingLevel:n,maxHeadingLevel:a}=e;return t.flatMap((e=>{const t=F({toc:e.children,minHeadingLevel:n,maxHeadingLevel:a});return function(e){return e.level>=n&&e.level<=a}(e)?[{...e,children:t}]:t}))}function q(e){const t=e.getBoundingClientRect();return t.top===t.bottom?q(e.parentNode):t}function G(e,t){let{anchorTopOffset:n}=t;const a=e.find((e=>q(e).top>=n));if(a){return function(e){return e.top>0&&e.bottom{e.current=t?0:document.querySelector(".navbar").clientHeight}),[t]),e}function J(e){const t=(0,a.useRef)(void 0),n=Y();(0,a.useEffect)((()=>{if(!e)return()=>{};const{linkClassName:a,linkActiveClassName:o,minHeadingLevel:r,maxHeadingLevel:l}=e;function c(){const e=function(e){return Array.from(document.getElementsByClassName(e))}(a),c=function(e){let{minHeadingLevel:t,maxHeadingLevel:n}=e;const a=[];for(let o=t;o<=n;o+=1)a.push(`h${o}.anchor`);return 
Array.from(document.querySelectorAll(a.join()))}({minHeadingLevel:r,maxHeadingLevel:l}),s=G(c,{anchorTopOffset:n.current}),i=e.find((e=>s&&s.id===function(e){return decodeURIComponent(e.href.substring(e.href.indexOf("#")+1))}(e)));e.forEach((e=>{!function(e,n){n?(t.current&&t.current!==e&&t.current.classList.remove(o),e.classList.add(o),t.current=e):e.classList.remove(o)}(e,e===i)}))}return document.addEventListener("scroll",c),document.addEventListener("resize",c),c(),()=>{document.removeEventListener("scroll",c),document.removeEventListener("resize",c)}}),[e,n])}function Q(e){let{toc:t,className:n,linkClassName:o,isChild:r}=e;return t.length?a.createElement("ul",{className:r?void 0:n},t.map((e=>a.createElement("li",{key:e.id},a.createElement("a",{href:`#${e.id}`,className:o??void 0,dangerouslySetInnerHTML:{__html:e.value}}),a.createElement(Q,{isChild:!0,toc:e.children,className:n,linkClassName:o}))))):null}const X=a.memo(Q);function K(e){let{toc:t,className:n="table-of-contents table-of-contents__left-border",linkClassName:o="table-of-contents__link",linkActiveClassName:r,minHeadingLevel:l,maxHeadingLevel:c,...s}=e;const i=(0,W.L)(),d=l??i.tableOfContents.minHeadingLevel,m=c??i.tableOfContents.maxHeadingLevel,p=function(e){let{toc:t,minHeadingLevel:n,maxHeadingLevel:o}=e;return(0,a.useMemo)((()=>F({toc:$(t),minHeadingLevel:n,maxHeadingLevel:o})),[t,n,o])}({toc:t,minHeadingLevel:d,maxHeadingLevel:m});return J((0,a.useMemo)((()=>{if(o&&r)return{linkClassName:o,linkActiveClassName:r,minHeadingLevel:d,maxHeadingLevel:m}}),[o,r,d,m])),a.createElement(X,(0,u.Z)({toc:p,className:n,linkClassName:o},s))}const ee={tocCollapsibleButton:"tocCollapsibleButton_TO0P",tocCollapsibleButtonExpanded:"tocCollapsibleButtonExpanded_MG3E"};function te(e){let{collapsed:t,...n}=e;return 
a.createElement("button",(0,u.Z)({type:"button"},n,{className:(0,d.Z)("clean-btn",ee.tocCollapsibleButton,!t&&ee.tocCollapsibleButtonExpanded,n.className)}),a.createElement(p.Z,{id:"theme.TOCCollapsible.toggleButtonLabel",description:"The label used by the button on the collapsible TOC component"},"On this page"))}const ne={tocCollapsible:"tocCollapsible_ETCw",tocCollapsibleContent:"tocCollapsibleContent_vkbj",tocCollapsibleExpanded:"tocCollapsibleExpanded_sAul"};function ae(e){let{toc:t,className:n,minHeadingLevel:o,maxHeadingLevel:r}=e;const{collapsed:l,toggleCollapsed:c}=(0,R.u)({initialState:!0});return a.createElement("div",{className:(0,d.Z)(ne.tocCollapsible,!l&&ne.tocCollapsibleExpanded,n)},a.createElement(te,{collapsed:l,onClick:c}),a.createElement(R.z,{lazy:!0,className:ne.tocCollapsibleContent,collapsed:l},a.createElement(K,{toc:t,minHeadingLevel:o,maxHeadingLevel:r})))}const oe={tocMobile:"tocMobile_ITEo"};function re(){const{toc:e,frontMatter:t}=s();return a.createElement(ae,{toc:e,minHeadingLevel:t.toc_min_heading_level,maxHeadingLevel:t.toc_max_heading_level,className:(0,d.Z)(y.k.docs.docTocMobile,oe.tocMobile)})}const le={tableOfContents:"tableOfContents_bqdL",docItemContainer:"docItemContainer_F8PC"},ce="table-of-contents__link toc-highlight",se="table-of-contents__link--active";function ie(e){let{className:t,...n}=e;return a.createElement("div",{className:(0,d.Z)(le.tableOfContents,"thin-scrollbar",t)},a.createElement(K,(0,u.Z)({},n,{linkClassName:ce,linkActiveClassName:se})))}function de(){const{toc:e,frontMatter:t}=s();return a.createElement(ie,{toc:e,minHeadingLevel:t.toc_min_heading_level,maxHeadingLevel:t.toc_max_heading_level,className:y.k.docs.docTocDesktop})}const me={anchorWithStickyNavbar:"anchorWithStickyNavbar_LWe7",anchorWithHideOnScrollNavbar:"anchorWithHideOnScrollNavbar_WYt5"};function ue(e){let{as:t,id:n,...o}=e;const{navbar:{hideOnScroll:r}}=(0,W.L)();if("h1"===t||!n)return a.createElement(t,(0,u.Z)({},o,{id:void 0}));const 
l=(0,p.I)({id:"theme.common.headingLinkTitle",message:"Direct link to {heading}",description:"Title for link to heading"},{heading:"string"==typeof o.children?o.children:n});return a.createElement(t,(0,u.Z)({},o,{className:(0,d.Z)("anchor",r?me.anchorWithHideOnScrollNavbar:me.anchorWithStickyNavbar,o.className),id:n}),o.children,a.createElement(f.Z,{className:"hash-link",to:`#${n}`,"aria-label":l,title:l},"\u200b"))}var pe=n(3905),fe=n(5742);var he=n(2389),ge=n(2949);function be(){const{prism:e}=(0,W.L)(),{colorMode:t}=(0,ge.I)(),n=e.theme,a=e.darkTheme||n;return"dark"===t?a:n}var ve=n(7594),Ee=n.n(ve);const ye=/title=(?["'])(?.*?)\1/,ke=/\{(?<range>[\d,-]+)\}/,Ne={js:{start:"\\/\\/",end:""},jsBlock:{start:"\\/\\*",end:"\\*\\/"},jsx:{start:"\\{\\s*\\/\\*",end:"\\*\\/\\s*\\}"},bash:{start:"#",end:""},html:{start:"\x3c!--",end:"--\x3e"}};function Ce(e,t){const n=e.map((e=>{const{start:n,end:a}=Ne[e];return`(?:${n}\\s*(${t.flatMap((e=>[e.line,e.block?.start,e.block?.end].filter(Boolean))).join("|")})\\s*${a})`})).join("|");return new RegExp(`^\\s*(?:${n})\\s*$`)}function Le(e,t){let n=e.replace(/\n$/,"");const{language:a,magicComments:o,metastring:r}=t;if(r&&ke.test(r)){const e=r.match(ke).groups.range;if(0===o.length)throw new Error(`A highlight range has been given in code block's metastring (\`\`\` ${r}), but no magic comment config is available. 
Docusaurus applies the first magic comment entry's className for metastring ranges.`);const t=o[0].className,a=Ee()(e).filter((e=>e>0)).map((e=>[e-1,[t]]));return{lineClassNames:Object.fromEntries(a),code:n}}if(void 0===a)return{lineClassNames:{},code:n};const l=function(e,t){switch(e){case"js":case"javascript":case"ts":case"typescript":return Ce(["js","jsBlock"],t);case"jsx":case"tsx":return Ce(["js","jsBlock","jsx"],t);case"html":return Ce(["js","jsBlock","html"],t);case"python":case"py":case"bash":return Ce(["bash"],t);case"markdown":case"md":return Ce(["html","jsx","bash"],t);default:return Ce(Object.keys(Ne),t)}}(a,o),c=n.split("\n"),s=Object.fromEntries(o.map((e=>[e.className,{start:0,range:""}]))),i=Object.fromEntries(o.filter((e=>e.line)).map((e=>{let{className:t,line:n}=e;return[n,t]}))),d=Object.fromEntries(o.filter((e=>e.block)).map((e=>{let{className:t,block:n}=e;return[n.start,t]}))),m=Object.fromEntries(o.filter((e=>e.block)).map((e=>{let{className:t,block:n}=e;return[n.end,t]})));for(let p=0;p<c.length;){const e=c[p].match(l);if(!e){p+=1;continue}const t=e.slice(1).find((e=>void 0!==e));i[t]?s[i[t]].range+=`${p},`:d[t]?s[d[t]].start=p:m[t]&&(s[m[t]].range+=`${s[m[t]].start}-${p-1},`),c.splice(p,1)}n=c.join("\n");const u={};return Object.entries(s).forEach((e=>{let[t,{range:n}]=e;Ee()(n).forEach((e=>{u[e]??=[],u[e].push(t)}))})),{lineClassNames:u,code:n}}const Te={codeBlockContainer:"codeBlockContainer_Ckt0"};function we(e){let{as:t,...n}=e;const o=function(e){const t={color:"--prism-color",backgroundColor:"--prism-background-color"},n={};return Object.entries(e.plain).forEach((e=>{let[a,o]=e;const r=t[a];r&&"string"==typeof o&&(n[r]=o)})),n}(be());return a.createElement(t,(0,u.Z)({},n,{style:o,className:(0,d.Z)(n.className,Te.codeBlockContainer,y.k.common.codeBlock)}))}const 
_e={codeBlockContent:"codeBlockContent_biex",codeBlockTitle:"codeBlockTitle_Ktv7",codeBlock:"codeBlock_bY9V",codeBlockStandalone:"codeBlockStandalone_MEMb",codeBlockLines:"codeBlockLines_e6Vv",codeBlockLinesWithNumbering:"codeBlockLinesWithNumbering_o6Pm",buttonGroup:"buttonGroup__atx"};function xe(e){let{children:t,className:n}=e;return a.createElement(we,{as:"pre",tabIndex:0,className:(0,d.Z)(_e.codeBlockStandalone,"thin-scrollbar",n)},a.createElement("code",{className:_e.codeBlockLines},t))}const Be={attributes:!0,characterData:!0,childList:!0,subtree:!0};function Ze(e,t){const[n,o]=(0,a.useState)(),l=(0,a.useCallback)((()=>{o(e.current?.closest("[role=tabpanel][hidden]"))}),[e,o]);(0,a.useEffect)((()=>{l()}),[l]),function(e,t,n){void 0===n&&(n=Be);const o=(0,r.zX)(t),l=(0,r.Ql)(n);(0,a.useEffect)((()=>{const t=new MutationObserver(o);return e&&t.observe(e,l),()=>t.disconnect()}),[e,o,l])}(n,(e=>{e.forEach((e=>{"attributes"===e.type&&"hidden"===e.attributeName&&(t(),l())}))}),{attributes:!0,characterData:!1,childList:!1,subtree:!1})}const Oe={plain:{backgroundColor:"#2a2734",color:"#9a86fd"},styles:[{types:["comment","prolog","doctype","cdata","punctuation"],style:{color:"#6c6783"}},{types:["namespace"],style:{opacity:.7}},{types:["tag","operator","number"],style:{color:"#e09142"}},{types:["property","function"],style:{color:"#9a86fd"}},{types:["tag-id","selector","atrule-id"],style:{color:"#eeebff"}},{types:["attr-name"],style:{color:"#c4b9fe"}},{types:["boolean","string","entity","url","attr-value","keyword","control","directive","unit","statement","regex","atrule","placeholder","variable"],style:{color:"#ffcc99"}},{types:["deleted"],style:{textDecorationLine:"line-through"}},{types:["inserted"],style:{textDecorationLine:"underline"}},{types:["italic"],style:{fontStyle:"italic"}},{types:["important","bold"],style:{fontWeight:"bold"}},{types:["important"],style:{color:"#c4b9fe"}}]};var He={Prism:n(7410).Z,theme:Oe};function Ae(e,t,n){return t in 
e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function je(){return je=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var a in n)Object.prototype.hasOwnProperty.call(n,a)&&(e[a]=n[a])}return e},je.apply(this,arguments)}var Se=/\r\n|\r|\n/,Ie=function(e){0===e.length?e.push({types:["plain"],content:"\n",empty:!0}):1===e.length&&""===e[0].content&&(e[0].content="\n",e[0].empty=!0)},Me=function(e,t){var n=e.length;return n>0&&e[n-1]===t?e:e.concat(t)};function Pe(e,t){var n={};for(var a in e)Object.prototype.hasOwnProperty.call(e,a)&&-1===t.indexOf(a)&&(n[a]=e[a]);return n}var Ue=function(e){function t(){for(var t=this,n=[],a=arguments.length;a--;)n[a]=arguments[a];e.apply(this,n),Ae(this,"getThemeDict",(function(e){if(void 0!==t.themeDict&&e.theme===t.prevTheme&&e.language===t.prevLanguage)return t.themeDict;t.prevTheme=e.theme,t.prevLanguage=e.language;var n=e.theme?function(e,t){var n=e.plain,a=Object.create(null),o=e.styles.reduce((function(e,n){var a=n.languages,o=n.style;return a&&!a.includes(t)||n.types.forEach((function(t){var n=je({},e[t],o);e[t]=n})),e}),a);return o.root=n,o.plain=je({},n,{backgroundColor:null}),o}(e.theme,e.language):void 0;return t.themeDict=n})),Ae(this,"getLineProps",(function(e){var n=e.key,a=e.className,o=e.style,r=je({},Pe(e,["key","className","style","line"]),{className:"token-line",style:void 0,key:void 0}),l=t.getThemeDict(t.props);return void 0!==l&&(r.style=l.plain),void 0!==o&&(r.style=void 0!==r.style?je({},r.style,o):o),void 0!==n&&(r.key=n),a&&(r.className+=" "+a),r})),Ae(this,"getStyleForToken",(function(e){var n=e.types,a=e.empty,o=n.length,r=t.getThemeDict(t.props);if(void 0!==r){if(1===o&&"plain"===n[0])return a?{display:"inline-block"}:void 0;if(1===o&&!a)return r[n[0]];var l=a?{display:"inline-block"}:{},c=n.map((function(e){return r[e]}));return Object.assign.apply(Object,[l].concat(c))}})),Ae(this,"getTokenProps",(function(e){var 
n=e.key,a=e.className,o=e.style,r=e.token,l=je({},Pe(e,["key","className","style","token"]),{className:"token "+r.types.join(" "),children:r.content,style:t.getStyleForToken(r),key:void 0});return void 0!==o&&(l.style=void 0!==l.style?je({},l.style,o):o),void 0!==n&&(l.key=n),a&&(l.className+=" "+a),l})),Ae(this,"tokenize",(function(e,t,n,a){var o={code:t,grammar:n,language:a,tokens:[]};e.hooks.run("before-tokenize",o);var r=o.tokens=e.tokenize(o.code,o.grammar,o.language);return e.hooks.run("after-tokenize",o),r}))}return e&&(t.__proto__=e),t.prototype=Object.create(e&&e.prototype),t.prototype.constructor=t,t.prototype.render=function(){var e=this.props,t=e.Prism,n=e.language,a=e.code,o=e.children,r=this.getThemeDict(this.props),l=t.languages[n];return o({tokens:function(e){for(var t=[[]],n=[e],a=[0],o=[e.length],r=0,l=0,c=[],s=[c];l>-1;){for(;(r=a[l]++)<o[l];){var i=void 0,d=t[l],m=n[l][r];if("string"==typeof m?(d=l>0?d:["plain"],i=m):(d=Me(d,m.type),m.alias&&(d=Me(d,m.alias)),i=m.content),"string"==typeof i){var u=i.split(Se),p=u.length;c.push({types:d,content:u[0]});for(var f=1;f<p;f++)Ie(c),s.push(c=[]),c.push({types:d,content:u[f]})}else l++,t.push(d),n.push(i),a.push(0),o.push(i.length)}l--,t.pop(),n.pop(),a.pop(),o.pop()}return Ie(c),s}(void 0!==l?this.tokenize(t,a,l,n):[a]),className:"prism-code language-"+n,style:void 0!==r?r.root:{},getLineProps:this.getLineProps,getTokenProps:this.getTokenProps})},t}(a.Component);const ze=Ue,Ve={codeLine:"codeLine_lJS_",codeLineNumber:"codeLineNumber_Tfdd",codeLineContent:"codeLineContent_feaV"};function De(e){let{line:t,classNames:n,showLineNumbers:o,getLineProps:r,getTokenProps:l}=e;1===t.length&&"\n"===t[0].content&&(t[0].content="");const c=r({line:t,className:(0,d.Z)(n,o&&Ve.codeLine)}),s=t.map(((e,t)=>a.createElement("span",(0,u.Z)({key:t},l({token:e,key:t})))));return 
a.createElement("span",c,o?a.createElement(a.Fragment,null,a.createElement("span",{className:Ve.codeLineNumber}),a.createElement("span",{className:Ve.codeLineContent},s)):s,a.createElement("br",null))}const Re={copyButtonCopied:"copyButtonCopied_obH4",copyButtonIcons:"copyButtonIcons_eSgA",copyButtonIcon:"copyButtonIcon_y97N",copyButtonSuccessIcon:"copyButtonSuccessIcon_LjdS"};function We(e){let{code:t,className:n}=e;const[o,r]=(0,a.useState)(!1),l=(0,a.useRef)(void 0),c=(0,a.useCallback)((()=>{!function(e,t){let{target:n=document.body}=void 0===t?{}:t;if("string"!=typeof e)throw new TypeError(`Expected parameter \`text\` to be a \`string\`, got \`${typeof e}\`.`);const a=document.createElement("textarea"),o=document.activeElement;a.value=e,a.setAttribute("readonly",""),a.style.contain="strict",a.style.position="absolute",a.style.left="-9999px",a.style.fontSize="12pt";const r=document.getSelection(),l=r.rangeCount>0&&r.getRangeAt(0);n.append(a),a.select(),a.selectionStart=0,a.selectionEnd=e.length;let c=!1;try{c=document.execCommand("copy")}catch{}a.remove(),l&&(r.removeAllRanges(),r.addRange(l)),o&&o.focus()}(t),r(!0),l.current=window.setTimeout((()=>{r(!1)}),1e3)}),[t]);return(0,a.useEffect)((()=>()=>window.clearTimeout(l.current)),[]),a.createElement("button",{type:"button","aria-label":o?(0,p.I)({id:"theme.CodeBlock.copied",message:"Copied",description:"The copied button label on code blocks"}):(0,p.I)({id:"theme.CodeBlock.copyButtonAriaLabel",message:"Copy code to clipboard",description:"The ARIA label for copy code blocks button"}),title:(0,p.I)({id:"theme.CodeBlock.copy",message:"Copy",description:"The copy button label on code blocks"}),className:(0,d.Z)("clean-btn",n,Re.copyButton,o&&Re.copyButtonCopied),onClick:c},a.createElement("span",{className:Re.copyButtonIcons,"aria-hidden":"true"},a.createElement("svg",{className:Re.copyButtonIcon,viewBox:"0 0 24 24"},a.createElement("path",{d:"M19,21H8V7H19M19,5H8A2,2 0 0,0 6,7V21A2,2 0 0,0 8,23H19A2,2 0 0,0 
21,21V7A2,2 0 0,0 19,5M16,1H4A2,2 0 0,0 2,3V17H4V3H16V1Z"})),a.createElement("svg",{className:Re.copyButtonSuccessIcon,viewBox:"0 0 24 24"},a.createElement("path",{d:"M21,7L9,19L3.5,13.5L4.91,12.09L9,16.17L19.59,5.59L21,7Z"}))))}const $e={wordWrapButtonIcon:"wordWrapButtonIcon_Bwma",wordWrapButtonEnabled:"wordWrapButtonEnabled_EoeP"};function Fe(e){let{className:t,onClick:n,isEnabled:o}=e;const r=(0,p.I)({id:"theme.CodeBlock.wordWrapToggle",message:"Toggle word wrap",description:"The title attribute for toggle word wrapping button of code block lines"});return a.createElement("button",{type:"button",onClick:n,className:(0,d.Z)("clean-btn",t,o&&$e.wordWrapButtonEnabled),"aria-label":r,title:r},a.createElement("svg",{className:$e.wordWrapButtonIcon,viewBox:"0 0 24 24","aria-hidden":"true"},a.createElement("path",{fill:"currentColor",d:"M4 19h6v-2H4v2zM20 5H4v2h16V5zm-3 6H4v2h13.25c1.1 0 2 .9 2 2s-.9 2-2 2H15v-2l-3 3l3 3v-2h2c2.21 0 4-1.79 4-4s-1.79-4-4-4z"})))}function qe(e){let{children:t,className:n="",metastring:o,title:r,showLineNumbers:l,language:c}=e;const{prism:{defaultLanguage:s,magicComments:i}}=(0,W.L)(),m=c??function(e){const t=e.split(" ").find((e=>e.startsWith("language-")));return t?.replace(/language-/,"")}(n)??s,p=be(),f=function(){const[e,t]=(0,a.useState)(!1),[n,o]=(0,a.useState)(!1),r=(0,a.useRef)(null),l=(0,a.useCallback)((()=>{const n=r.current.querySelector("code");e?n.removeAttribute("style"):(n.style.whiteSpace="pre-wrap",n.style.overflowWrap="anywhere"),t((e=>!e))}),[r,e]),c=(0,a.useCallback)((()=>{const{scrollWidth:e,clientWidth:t}=r.current,n=e>t||r.current.querySelector("code").hasAttribute("style");o(n)}),[r]);return Ze(r,c),(0,a.useEffect)((()=>{c()}),[e,c]),(0,a.useEffect)((()=>(window.addEventListener("resize",c,{passive:!0}),()=>{window.removeEventListener("resize",c)})),[c]),{codeBlockRef:r,isEnabled:e,isCodeScrollable:n,toggle:l}}(),h=function(e){return 
e?.match(ye)?.groups.title??""}(o)||r,{lineClassNames:g,code:b}=Le(t,{metastring:o,language:m,magicComments:i}),v=l??function(e){return Boolean(e?.includes("showLineNumbers"))}(o);return a.createElement(we,{as:"div",className:(0,d.Z)(n,m&&!n.includes(`language-${m}`)&&`language-${m}`)},h&&a.createElement("div",{className:_e.codeBlockTitle},h),a.createElement("div",{className:_e.codeBlockContent},a.createElement(ze,(0,u.Z)({},He,{theme:p,code:b,language:m??"text"}),(e=>{let{className:t,tokens:n,getLineProps:o,getTokenProps:r}=e;return a.createElement("pre",{tabIndex:0,ref:f.codeBlockRef,className:(0,d.Z)(t,_e.codeBlock,"thin-scrollbar")},a.createElement("code",{className:(0,d.Z)(_e.codeBlockLines,v&&_e.codeBlockLinesWithNumbering)},n.map(((e,t)=>a.createElement(De,{key:t,line:e,getLineProps:o,getTokenProps:r,classNames:g[t],showLineNumbers:v})))))})),a.createElement("div",{className:_e.buttonGroup},(f.isEnabled||f.isCodeScrollable)&&a.createElement(Fe,{className:_e.codeButton,onClick:()=>f.toggle(),isEnabled:f.isEnabled}),a.createElement(We,{className:_e.codeButton,code:b}))))}function Ge(e){let{children:t,...n}=e;const o=(0,he.Z)(),r=function(e){return a.Children.toArray(e).some((e=>(0,a.isValidElement)(e)))?e:Array.isArray(e)?e.join(""):e}(t),l="string"==typeof r?qe:xe;return a.createElement(l,(0,u.Z)({key:String(o)},n),r)}const Ye={details:"details_lb9f",isBrowser:"isBrowser_bmU9",collapsibleContent:"collapsibleContent_i85q"};function Je(e){return!!e&&("SUMMARY"===e.tagName||Je(e.parentElement))}function Qe(e,t){return!!e&&(e===t||Qe(e.parentElement,t))}function Xe(e){let{summary:t,children:n,...o}=e;const r=(0,he.Z)(),l=(0,a.useRef)(null),{collapsed:c,setCollapsed:s}=(0,R.u)({initialState:!o.open}),[i,m]=(0,a.useState)(o.open),p=a.isValidElement(t)?t:a.createElement("summary",null,t??"Details");return 
a.createElement("details",(0,u.Z)({},o,{ref:l,open:i,"data-collapsed":c,className:(0,d.Z)(Ye.details,r&&Ye.isBrowser,o.className),onMouseDown:e=>{Je(e.target)&&e.detail>1&&e.preventDefault()},onClick:e=>{e.stopPropagation();const t=e.target;Je(t)&&Qe(t,l.current)&&(e.preventDefault(),c?(s(!1),m(!0)):s(!0))}}),p,a.createElement(R.z,{lazy:!1,collapsed:c,disableSSRStyle:!0,onCollapseTransitionEnd:e=>{s(e),m(!e)}},a.createElement("div",{className:Ye.collapsibleContent},n)))}const Ke={details:"details_b_Ee"},et="alert alert--info";function tt(e){let{...t}=e;return a.createElement(Xe,(0,u.Z)({},t,{className:(0,d.Z)(et,Ke.details,t.className)}))}function nt(e){return a.createElement(ue,e)}const at={containsTaskList:"containsTaskList_mC6p"};const ot={img:"img_ev3q"};const rt="admonition_LlT9",lt="admonitionHeading_tbUL",ct="admonitionIcon_kALy",st="admonitionContent_S0QG";const it={note:{infimaClassName:"secondary",iconComponent:function(){return a.createElement("svg",{viewBox:"0 0 14 16"},a.createElement("path",{fillRule:"evenodd",d:"M6.3 5.69a.942.942 0 0 1-.28-.7c0-.28.09-.52.28-.7.19-.18.42-.28.7-.28.28 0 .52.09.7.28.18.19.28.42.28.7 0 .28-.09.52-.28.7a1 1 0 0 1-.7.3c-.28 0-.52-.11-.7-.3zM8 7.99c-.02-.25-.11-.48-.31-.69-.2-.19-.42-.3-.69-.31H6c-.27.02-.48.13-.69.31-.2.2-.3.44-.31.69h1v3c.02.27.11.5.31.69.2.2.42.31.69.31h1c.27 0 .48-.11.69-.31.2-.19.3-.42.31-.69H8V7.98v.01zM7 2.3c-3.14 0-5.7 2.54-5.7 5.68 0 3.14 2.56 5.7 5.7 5.7s5.7-2.55 5.7-5.7c0-3.15-2.56-5.69-5.7-5.69v.01zM7 .98c3.86 0 7 3.14 7 7s-3.14 7-7 7-7-3.12-7-7 3.14-7 7-7z"}))},label:a.createElement(p.Z,{id:"theme.admonition.note",description:"The default label used for the Note admonition (:::note)"},"note")},tip:{infimaClassName:"success",iconComponent:function(){return a.createElement("svg",{viewBox:"0 0 12 16"},a.createElement("path",{fillRule:"evenodd",d:"M6.5 0C3.48 0 1 2.19 1 5c0 .92.55 2.25 1 3 1.34 2.25 1.78 2.78 2 4v1h5v-1c.22-1.22.66-1.75 2-4 .45-.75 1-2.08 1-3 0-2.81-2.48-5-5.5-5zm3.64 
7.48c-.25.44-.47.8-.67 1.11-.86 1.41-1.25 2.06-1.45 3.23-.02.05-.02.11-.02.17H5c0-.06 0-.13-.02-.17-.2-1.17-.59-1.83-1.45-3.23-.2-.31-.42-.67-.67-1.11C2.44 6.78 2 5.65 2 5c0-2.2 2.02-4 4.5-4 1.22 0 2.36.42 3.22 1.19C10.55 2.94 11 3.94 11 5c0 .66-.44 1.78-.86 2.48zM4 14h5c-.23 1.14-1.3 2-2.5 2s-2.27-.86-2.5-2z"}))},label:a.createElement(p.Z,{id:"theme.admonition.tip",description:"The default label used for the Tip admonition (:::tip)"},"tip")},danger:{infimaClassName:"danger",iconComponent:function(){return a.createElement("svg",{viewBox:"0 0 12 16"},a.createElement("path",{fillRule:"evenodd",d:"M5.05.31c.81 2.17.41 3.38-.52 4.31C3.55 5.67 1.98 6.45.9 7.98c-1.45 2.05-1.7 6.53 3.53 7.7-2.2-1.16-2.67-4.52-.3-6.61-.61 2.03.53 3.33 1.94 2.86 1.39-.47 2.3.53 2.27 1.67-.02.78-.31 1.44-1.13 1.81 3.42-.59 4.78-3.42 4.78-5.56 0-2.84-2.53-3.22-1.25-5.61-1.52.13-2.03 1.13-1.89 2.75.09 1.08-1.02 1.8-1.86 1.33-.67-.41-.66-1.19-.06-1.78C8.18 5.31 8.68 2.45 5.05.32L5.03.3l.02.01z"}))},label:a.createElement(p.Z,{id:"theme.admonition.danger",description:"The default label used for the Danger admonition (:::danger)"},"danger")},info:{infimaClassName:"info",iconComponent:function(){return a.createElement("svg",{viewBox:"0 0 14 16"},a.createElement("path",{fillRule:"evenodd",d:"M7 2.3c3.14 0 5.7 2.56 5.7 5.7s-2.56 5.7-5.7 5.7A5.71 5.71 0 0 1 1.3 8c0-3.14 2.56-5.7 5.7-5.7zM7 1C3.14 1 0 4.14 0 8s3.14 7 7 7 7-3.14 7-7-3.14-7-7-7zm1 3H6v5h2V4zm0 6H6v2h2v-2z"}))},label:a.createElement(p.Z,{id:"theme.admonition.info",description:"The default label used for the Info admonition (:::info)"},"info")},caution:{infimaClassName:"warning",iconComponent:function(){return a.createElement("svg",{viewBox:"0 0 16 16"},a.createElement("path",{fillRule:"evenodd",d:"M8.893 1.5c-.183-.31-.52-.5-.887-.5s-.703.19-.886.5L.138 13.499a.98.98 0 0 0 0 1.001c.193.31.53.501.886.501h13.964c.367 0 .704-.19.877-.5a1.03 1.03 0 0 0 .01-1.002L8.893 1.5zm.133 
11.497H6.987v-2.003h2.039v2.003zm0-3.004H6.987V5.987h2.039v4.006z"}))},label:a.createElement(p.Z,{id:"theme.admonition.caution",description:"The default label used for the Caution admonition (:::caution)"},"caution")}},dt={secondary:"note",important:"info",success:"tip",warning:"danger"};function mt(e){const{mdxAdmonitionTitle:t,rest:n}=function(e){const t=a.Children.toArray(e),n=t.find((e=>a.isValidElement(e)&&"mdxAdmonitionTitle"===e.props?.mdxType)),o=a.createElement(a.Fragment,null,t.filter((e=>e!==n)));return{mdxAdmonitionTitle:n,rest:o}}(e.children);return{...e,title:e.title??t,children:n}}const ut={head:function(e){const t=a.Children.map(e.children,(e=>a.isValidElement(e)?function(e){if(e.props?.mdxType&&e.props.originalType){const{mdxType:t,originalType:n,...o}=e.props;return a.createElement(e.props.originalType,o)}return e}(e):e));return a.createElement(fe.Z,e,t)},code:function(e){const t=["a","abbr","b","br","button","cite","code","del","dfn","em","i","img","input","ins","kbd","label","object","output","q","ruby","s","small","span","strong","sub","sup","time","u","var","wbr"];return a.Children.toArray(e.children).every((e=>"string"==typeof e&&!e.includes("\n")||(0,a.isValidElement)(e)&&t.includes(e.props?.mdxType)))?a.createElement("code",e):a.createElement(Ge,e)},a:function(e){return a.createElement(f.Z,e)},pre:function(e){return a.createElement(Ge,(0,a.isValidElement)(e.children)&&"code"===e.children.props?.originalType?e.children.props:{...e})},details:function(e){const t=a.Children.toArray(e.children),n=t.find((e=>a.isValidElement(e)&&"summary"===e.props?.mdxType)),o=a.createElement(a.Fragment,null,t.filter((e=>e!==n)));return a.createElement(tt,(0,u.Z)({},e,{summary:n}),o)},ul:function(e){return a.createElement("ul",(0,u.Z)({},e,{className:(t=e.className,(0,d.Z)(t,t?.includes("contains-task-list")&&at.containsTaskList))}));var t},img:function(e){return 
a.createElement("img",(0,u.Z)({loading:"lazy"},e,{className:(t=e.className,(0,d.Z)(t,ot.img))}));var t},h1:e=>a.createElement(nt,(0,u.Z)({as:"h1"},e)),h2:e=>a.createElement(nt,(0,u.Z)({as:"h2"},e)),h3:e=>a.createElement(nt,(0,u.Z)({as:"h3"},e)),h4:e=>a.createElement(nt,(0,u.Z)({as:"h4"},e)),h5:e=>a.createElement(nt,(0,u.Z)({as:"h5"},e)),h6:e=>a.createElement(nt,(0,u.Z)({as:"h6"},e)),admonition:function(e){const{children:t,type:n,title:o,icon:r}=mt(e),l=function(e){const t=dt[e]??e,n=it[t];return n||(console.warn(`No admonition config found for admonition type "${t}". Using Info as fallback.`),it.info)}(n),c=o??l.label,{iconComponent:s}=l,i=r??a.createElement(s,null);return a.createElement("div",{className:(0,d.Z)(y.k.common.admonition,y.k.common.admonitionType(e.type),"alert",`alert--${l.infimaClassName}`,rt)},a.createElement("div",{className:lt},a.createElement("span",{className:ct},i),c),a.createElement("div",{className:st},t))},mermaid:()=>null};function pt(e){let{children:t}=e;return a.createElement(pe.Zo,{components:ut},t)}function ft(e){let{children:t}=e;const n=function(){const{metadata:e,frontMatter:t,contentTitle:n}=s();return t.hide_title||void 0!==n?null:e.title}();return a.createElement("div",{className:(0,d.Z)(y.k.docs.docMarkdown,"markdown")},n&&a.createElement("header",null,a.createElement(ue,{as:"h1"},n)),a.createElement(pt,null,t))}var ht=n(2802),gt=n(8596),bt=n(4996);function vt(e){return a.createElement("svg",(0,u.Z)({viewBox:"0 0 24 24"},e),a.createElement("path",{d:"M10 19v-5h4v5c0 .55.45 1 1 1h3c.55 0 1-.45 1-1v-7h1.7c.46 0 .68-.57.33-.87L12.67 3.6c-.38-.34-.96-.34-1.34 0l-8.36 7.53c-.34.3-.13.87.33.87H5v7c0 .55.45 1 1 1h3c.55 0 1-.45 1-1z",fill:"currentColor"}))}const Et={breadcrumbHomeIcon:"breadcrumbHomeIcon_YNFT"};function yt(){const e=(0,bt.Z)("/");return a.createElement("li",{className:"breadcrumbs__item"},a.createElement(f.Z,{"aria-label":(0,p.I)({id:"theme.docs.breadcrumbs.home",message:"Home page",description:"The ARIA label for the 
home page in the breadcrumbs"}),className:"breadcrumbs__link",href:e},a.createElement(vt,{className:Et.breadcrumbHomeIcon})))}const kt={breadcrumbsContainer:"breadcrumbsContainer_Z_bl"};function Nt(e){let{children:t,href:n,isLast:o}=e;const r="breadcrumbs__link";return o?a.createElement("span",{className:r,itemProp:"name"},t):n?a.createElement(f.Z,{className:r,href:n,itemProp:"item"},a.createElement("span",{itemProp:"name"},t)):a.createElement("span",{className:r},t)}function Ct(e){let{children:t,active:n,index:o,addMicrodata:r}=e;return a.createElement("li",(0,u.Z)({},r&&{itemScope:!0,itemProp:"itemListElement",itemType:"https://schema.org/ListItem"},{className:(0,d.Z)("breadcrumbs__item",{"breadcrumbs__item--active":n})}),t,a.createElement("meta",{itemProp:"position",content:String(o+1)}))}function Lt(){const e=(0,ht.s1)(),t=(0,gt.Ns)();return e?a.createElement("nav",{className:(0,d.Z)(y.k.docs.docBreadcrumbs,kt.breadcrumbsContainer),"aria-label":(0,p.I)({id:"theme.docs.breadcrumbs.navAriaLabel",message:"Breadcrumbs",description:"The ARIA label for the breadcrumbs"})},a.createElement("ul",{className:"breadcrumbs",itemScope:!0,itemType:"https://schema.org/BreadcrumbList"},t&&a.createElement(yt,null),e.map(((t,n)=>{const o=n===e.length-1;return a.createElement(Ct,{key:n,active:o,index:n,addMicrodata:!!t.href},a.createElement(Nt,{href:t.href,isLast:o},t.label))})))):null}const Tt={docItemContainer:"docItemContainer_Djhp",docItemCol:"docItemCol_VOVn"};function wt(e){let{children:t}=e;const n=function(){const{frontMatter:e,toc:t}=s(),n=(0,m.i)(),o=e.hide_table_of_contents,r=!o&&t.length>0;return{hidden:o,mobile:r?a.createElement(re,null):void 0,desktop:!r||"desktop"!==n&&"ssr"!==n?void 0:a.createElement(de,null)}}();return 
a.createElement("div",{className:"row"},a.createElement("div",{className:(0,d.Z)("col",!n.hidden&&Tt.docItemCol)},a.createElement(_,null),a.createElement("div",{className:Tt.docItemContainer},a.createElement("article",null,a.createElement(Lt,null),a.createElement(x,null),n.mobile,a.createElement(ft,null,t),a.createElement(D,null)),a.createElement(b,null))),n.desktop&&a.createElement("div",{className:"col col--3"},n.desktop))}function _t(e){const t=`docs-doc-id-${e.content.metadata.unversionedId}`,n=e.content;return a.createElement(c,{content:e.content},a.createElement(o.FG,{className:t},a.createElement(i,null),a.createElement(wt,null,a.createElement(n,null))))}},4477:(e,t,n)=>{"use strict";n.d(t,{E:()=>c,q:()=>l});var a=n(7294),o=n(902);const r=a.createContext(null);function l(e){let{children:t,version:n}=e;return a.createElement(r.Provider,{value:n},t)}function c(){const e=(0,a.useContext)(r);if(null===e)throw new o.i6("DocsVersionProvider");return e}},7594:(e,t)=>{function n(e){let t,n=[];for(let a of e.split(",").map((e=>e.trim())))if(/^-?\d+$/.test(a))n.push(parseInt(a,10));else if(t=a.match(/^(-?\d+)(-|\.\.\.?|\u2025|\u2026|\u22EF)(-?\d+)$/)){let[e,a,o,r]=t;if(a&&r){a=parseInt(a),r=parseInt(r);const e=a<r?1:-1;"-"!==o&&".."!==o&&"\u2025"!==o||(r+=e);for(let t=a;t!==r;t+=e)n.push(t)}}return n}t.default=n,e.exports=n}}]); \ No newline at end of file diff --git a/assets/js/1957b43a.daf8773c.js b/assets/js/1957b43a.daf8773c.js new file mode 100644 index 0000000..5f6194a --- /dev/null +++ b/assets/js/1957b43a.daf8773c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5684],{3905:(t,a,e)=>{e.d(a,{Zo:()=>u,kt:()=>c});var i=e(7294);function r(t,a,e){return a in t?Object.defineProperty(t,a,{value:e,enumerable:!0,configurable:!0,writable:!0}):t[a]=e,t}function n(t,a){var e=Object.keys(t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(t);a&&(i=i.filter((function(a){return 
Object.getOwnPropertyDescriptor(t,a).enumerable}))),e.push.apply(e,i)}return e}function s(t){for(var a=1;a<arguments.length;a++){var e=null!=arguments[a]?arguments[a]:{};a%2?n(Object(e),!0).forEach((function(a){r(t,a,e[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(e)):n(Object(e)).forEach((function(a){Object.defineProperty(t,a,Object.getOwnPropertyDescriptor(e,a))}))}return t}function p(t,a){if(null==t)return{};var e,i,r=function(t,a){if(null==t)return{};var e,i,r={},n=Object.keys(t);for(i=0;i<n.length;i++)e=n[i],a.indexOf(e)>=0||(r[e]=t[e]);return r}(t,a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);for(i=0;i<n.length;i++)e=n[i],a.indexOf(e)>=0||Object.prototype.propertyIsEnumerable.call(t,e)&&(r[e]=t[e])}return r}var l=i.createContext({}),k=function(t){var a=i.useContext(l),e=a;return t&&(e="function"==typeof t?t(a):s(s({},a),t)),e},u=function(t){var a=k(t.components);return i.createElement(l.Provider,{value:a},t.children)},m="mdxType",o={inlineCode:"code",wrapper:function(t){var a=t.children;return i.createElement(i.Fragment,{},a)}},h=i.forwardRef((function(t,a){var e=t.components,r=t.mdxType,n=t.originalType,l=t.parentName,u=p(t,["components","mdxType","originalType","parentName"]),m=k(e),h=r,c=m["".concat(l,".").concat(h)]||m[h]||o[h]||n;return e?i.createElement(c,s(s({ref:a},u),{},{components:e})):i.createElement(c,s({ref:a},u))}));function c(t,a){var e=arguments,r=a&&a.mdxType;if("string"==typeof t||r){var n=e.length,s=new Array(n);s[0]=h;var p={};for(var l in a)hasOwnProperty.call(a,l)&&(p[l]=a[l]);p.originalType=t,p[m]="string"==typeof t?t:r,s[1]=p;for(var k=2;k<n;k++)s[k]=e[k];return i.createElement.apply(null,s)}return i.createElement.apply(null,e)}h.displayName="MDXCreateElement"},6494:(t,a,e)=>{e.r(a),e.d(a,{assets:()=>l,contentTitle:()=>s,default:()=>o,frontMatter:()=>n,metadata:()=>p,toc:()=>k});var i=e(7462),r=(e(7294),e(3905));const n={},s="Release 
notes",p={unversionedId:"CHANGELOG",id:"version-0.7.0/CHANGELOG",title:"Release notes",description:"0.7.0",source:"@site/versioned_docs/version-0.7.0/CHANGELOG.md",sourceDirName:".",slug:"/CHANGELOG",permalink:"/docs/0.7.0/CHANGELOG",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Contributing to fastkafka",permalink:"/docs/0.7.0/CONTRIBUTING"}},l={},k=[{value:"0.7.0",id:"070",level:2},{value:"New Features",id:"new-features",level:3},{value:"Bugs Squashed",id:"bugs-squashed",level:3},{value:"0.6.0",id:"060",level:2},{value:"New Features",id:"new-features-1",level:3},{value:"Bugs Squashed",id:"bugs-squashed-1",level:3},{value:"0.5.0",id:"050",level:2},{value:"New Features",id:"new-features-2",level:3},{value:"Bugs Squashed",id:"bugs-squashed-2",level:3},{value:"0.4.0",id:"040",level:2},{value:"New Features",id:"new-features-3",level:3},{value:"0.3.1",id:"031",level:2},{value:"0.3.0",id:"030",level:2},{value:"New Features",id:"new-features-4",level:3},{value:"Bugs Squashed",id:"bugs-squashed-3",level:3},{value:"0.2.3",id:"023",level:2},{value:"0.2.2",id:"022",level:2},{value:"New Features",id:"new-features-5",level:3},{value:"Bugs Squashed",id:"bugs-squashed-4",level:3},{value:"0.2.0",id:"020",level:2},{value:"New Features",id:"new-features-6",level:3},{value:"Bugs Squashed",id:"bugs-squashed-5",level:3},{value:"0.1.3",id:"013",level:2},{value:"0.1.2",id:"012",level:2},{value:"New Features",id:"new-features-7",level:3},{value:"Bugs Squashed",id:"bugs-squashed-6",level:3},{value:"0.1.1",id:"011",level:2},{value:"Bugs Squashed",id:"bugs-squashed-7",level:3},{value:"0.1.0",id:"010",level:2}],u={toc:k},m="wrapper";function o(t){let{components:a,...e}=t;return(0,r.kt)(m,(0,i.Z)({},u,e,{components:a,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"release-notes"},"Release notes"),(0,r.kt)("h2",{id:"070"},"0.7.0"),(0,r.kt)("h3",{id:"new-features"},"New 
Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optional description argument to consumes and produces decorator implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/338"},"#338"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Consumes and produces decorators now have optional ",(0,r.kt)("inlineCode",{parentName:"li"},"description")," argument that is used instead of function docstring in async doc generation when specified"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"FastKafka Windows OS support enabled (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/326"},"#326"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"FastKafka can now run on Windows"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"FastKafka and FastAPI integration implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/304"},"#304"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"FastKafka can now be run alongside FastAPI"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Batch consuming option to consumers implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/298"},"#298"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Consumers can consume events in batches by specifying msg type of consuming function as ",(0,r.kt)("inlineCode",{parentName:"li"},"List[YourMsgType]")," 
"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed support for synchronous produce functions (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/295"},"#295"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Added default broker values and update docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/292"},"#292"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("h3",{id:"bugs-squashed"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix index.ipynb to be runnable in colab (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/342"},"#342"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Use cli option root_path docs generate and serve CLI commands (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/341"},"#341"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix incorrect asyncapi docs path on fastkafka docs serve command (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/335"},"#335"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Serve docs now takes app ",(0,r.kt)("inlineCode",{parentName:"li"},"root_path")," argument into consideration when specified in app"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/315"},"#315"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix logs printing timestamps (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/308"},"#308"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix topics with dots causing failure of tester instantiation (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/306"},"#306"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},'Specified topics can now have "." in their names')))),(0,r.kt)("h2",{id:"060"},"0.6.0"),(0,r.kt)("h3",{id:"new-features-1"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Timestamps added to CLI commands (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/283"},"#283"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Added option to process messages concurrently (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/278"},"#278"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"A new ",(0,r.kt)("inlineCode",{parentName:"li"},"executor")," option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add consumes and produces functions to app (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/274"},"#274"),"), thanks to 
",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add batching for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/273"},"#273"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(batch): batch support is a real need! and i see it on the issue list.... so hope we do not need to wait too long"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken links in guides (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/272"},"#272"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate the docusaurus sidebar dynamically by parsing summary.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/270"},"#270"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Metadata passed to consumer (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/269"},"#269"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(key): read the key value somehow..Maybe I missed something in the docs\nrequirement(header): read header values, Reason: I use CDC 
| Debezium and in the current system the header values are important to differentiate between the CRUD operations."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Contribution with instructions how to build and test added (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/255"},"#255"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Export encoders, decoders from fastkafka.encoder (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/246"},"#246"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/239"},"#239"),")")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"UI Improvement: Post screenshots with links to the actual messages in testimonials section (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/228"},"#228"),")")),(0,r.kt)("h3",{id:"bugs-squashed-1"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Batch testing fix (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/280"},"#280"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Tester breaks when using Batching or KafkaEvent producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/279"},"#279"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Consumer loop callbacks are not executing in parallel (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/276"},"#276"),")"))),(0,r.kt)("h2",{id:"050"},"0.5.0"),(0,r.kt)("h3",{id:"new-features-2"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Significant speedup of Kafka producer (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/236"},"#236"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Added support for AVRO encoding/decoding (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/231"},"#231"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("h3",{id:"bugs-squashed-2"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed sidebar to include guides in docusaurus documentation 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/238"},"#238"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed link to symbols in docusaurus docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/227"},"#227"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed bootstrap servers from constructor (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/220"},"#220"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")))),(0,r.kt)("h2",{id:"040"},"0.4.0"),(0,r.kt)("h3",{id:"new-features-3"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Integrate fastkafka chat (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/208"},"#208"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add benchmarking (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/206"},"#206"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Enable fast testing without running kafka locally (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/198"},"#198"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate docs using Docusaurus (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/194"},"#194"),"), thanks to 
",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add test cases for LocalRedpandaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/189"},"#189"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Reimplement patch and delegates from fastcore (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/188"},"#188"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Rename existing functions into start and stop and add lifespan handler (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/117"},"#117"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"},"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"))))),(0,r.kt)("h2",{id:"031"},"0.3.1"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"README.md file updated")),(0,r.kt)("h2",{id:"030"},"0.3.0"),(0,r.kt)("h3",{id:"new-features-4"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Guide for fastkafka produces using partition key (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/172"},"#172"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #161"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add support for Redpanda for testing and deployment 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/181"},"#181"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Remove bootstrap_servers from ",(0,r.kt)("strong",{parentName:"p"},"init")," and use the name of broker as an option when running/testing (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/134"},"#134"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add a GH action file to check for broken links in the docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/163"},"#163"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optimize requirements for testing and docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/151"},"#151"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Break requirements into base and optional for testing and dev (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/124"},"#124"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Minimize base requirements needed just for running the service."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add link to example git repo into guide for building docs using actions (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/81"},"#81"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add logging for run_in_background (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/46"},"#46"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement partition Key mechanism for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/16"},"#16"),")"))),(0,r.kt)("h3",{id:"bugs-squashed-3"},"Bugs 
Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement checks for npm installation and version (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/176"},"#176"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #158 by checking if the npx is installed and more verbose error handling"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix the helper.py link in CHANGELOG.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/165"},"#165"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka docs install_deps fails (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/157"},"#157"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Unexpected internal error: ","[Errno 2]"," No such file or directory: 'npx'"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Broken links in docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/141"},"#141"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka run is not showing up in CLI docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/132"},"#132"),")"))),(0,r.kt)("h2",{id:"023"},"0.2.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fixed broken links on PyPi index page")),(0,r.kt)("h2",{id:"022"},"0.2.2"),(0,r.kt)("h3",{id:"new-features-5"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Extract JDK and Kafka installation out of LocalKafkaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/131"},"#131"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"PyYAML version relaxed 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/119"},"#119"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Replace docker based kafka with local (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/68"},"#68"),")"),(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace docker compose with a simple docker run (standard run_jupyter.sh should do)"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace all tests to use LocalKafkaBroker"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","update documentation")))),(0,r.kt)("h3",{id:"bugs-squashed-4"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken link for FastKafka docs in index notebook (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/145"},"#145"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix encoding issues when loading setup.py on windows OS (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/135"},"#135"),")"))),(0,r.kt)("h2",{id:"020"},"0.2.0"),(0,r.kt)("h3",{id:"new-features-6"},"New Features"),(0,r.kt)("ul",{className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul"},"Replace kafka container with LocalKafkaBroker 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/112"},"#112"),")",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Replace kafka container with LocalKafkaBroker in tests"))))),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Remove kafka container from tests environment"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Fix failing tests")),(0,r.kt)("h3",{id:"bugs-squashed-5"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fix random failing in CI (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/109"},"#109"),")")),(0,r.kt)("h2",{id:"013"},"0.1.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"version update in ",(0,r.kt)("strong",{parentName:"li"},"init"),".py")),(0,r.kt)("h2",{id:"012"},"0.1.2"),(0,r.kt)("h3",{id:"new-features-7"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Git workflow action for publishing Kafka docs (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/78"},"#78"),")")),(0,r.kt)("h3",{id:"bugs-squashed-6"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Include missing requirement (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/110"},"#110"),")",(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Typer is imported in this 
",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py"},"file")," but it is not included in ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/settings.ini"},"settings.ini")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add aiohttp which is imported in this ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py"},"file")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbformat which is imported in _components/helpers.py"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbconvert which is imported in _components/helpers.py")))),(0,r.kt)("h2",{id:"011"},"0.1.1"),(0,r.kt)("h3",{id:"bugs-squashed-7"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"JDK install fails on Python 3.8 (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/106"},"#106"),")")),(0,r.kt)("h2",{id:"010"},"0.1.0"),(0,r.kt)("p",null,"Initial release"))}o.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/196c63a7.f818c35d.js b/assets/js/196c63a7.f818c35d.js new file mode 100644 index 0000000..44df423 --- /dev/null +++ b/assets/js/196c63a7.f818c35d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4404],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>k});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function c(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),s=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=s(e.components);return r.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,p=c(e,["components","mdxType","originalType","parentName"]),d=s(n),f=a,k=d["".concat(i,".").concat(f)]||d[f]||u[f]||o;return n?r.createElement(k,l(l({ref:t},p),{},{components:n})):r.createElement(k,l({ref:t},p))}));function k(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=f;var c={};for(var i in t)hasOwnProperty.call(t,i)&&(c[i]=t[i]);c.originalType=e,c[d]="string"==typeof e?e:a,l[1]=c;for(var s=2;s<o;s++)l[s]=n[s];return r.createElement.apply(null,l)}return r.createElement.apply(null,n)}f.displayName="MDXCreateElement"},6869:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>c,toc:()=>s});var r=n(7462),a=(n(7294),n(3905));const o={},l=void 
0,c={unversionedId:"api/fastkafka/encoder/json_encoder",id:"version-0.8.0/api/fastkafka/encoder/json_encoder",title:"json_encoder",description:"jsonencoder {fastkafka.encoder.jsonencoder}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/json_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_encoder",permalink:"/docs/api/fastkafka/encoder/json_encoder",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_decoder",permalink:"/docs/api/fastkafka/encoder/json_decoder"},next:{title:"DynamicTaskExecutor",permalink:"/docs/api/fastkafka/executors/DynamicTaskExecutor"}},i={},s=[{value:"json_encoder",id:"fastkafka.encoder.json_encoder",level:3}],p={toc:s},d="wrapper";function u(e){let{components:t,...n}=e;return(0,a.kt)(d,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h3",{id:"fastkafka.encoder.json_encoder"},"json_encoder"),(0,a.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/json.py#L28-L38",class:"link-to-source",target:"_blank"},"View source"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-py"},"json_encoder(\n msg\n)\n")),(0,a.kt)("p",null,"Encoder to encode pydantic instances to json string"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Type"),(0,a.kt)("th",{parentName:"tr",align:null},"Description"),(0,a.kt)("th",{parentName:"tr",align:null},"Default"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("inlineCode",{parentName:"td"},"msg")),(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("inlineCode",{parentName:"td"},"BaseModel")),(0,a.kt)("td",{parentName:"tr",align:null},"An instance of pydantic 
basemodel"),(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("em",{parentName:"td"},"required"))))),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Type"),(0,a.kt)("th",{parentName:"tr",align:null},"Description"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("inlineCode",{parentName:"td"},"bytes")),(0,a.kt)("td",{parentName:"tr",align:null},"Json string in bytes which is encoded from pydantic basemodel")))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1a4e3797.a52196f1.js b/assets/js/1a4e3797.a52196f1.js new file mode 100644 index 0000000..793602a --- /dev/null +++ b/assets/js/1a4e3797.a52196f1.js @@ -0,0 +1,2 @@ +/*! For license information please see 1a4e3797.a52196f1.js.LICENSE.txt */ +(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7920],{7331:e=>{function t(){this._events=this._events||{},this._maxListeners=this._maxListeners||void 0}function r(e){return"function"==typeof e}function n(e){return"object"==typeof e&&null!==e}function i(e){return void 0===e}e.exports=t,t.prototype._events=void 0,t.prototype._maxListeners=void 0,t.defaultMaxListeners=10,t.prototype.setMaxListeners=function(e){if("number"!=typeof e||e<0||isNaN(e))throw TypeError("n must be a positive number");return this._maxListeners=e,this},t.prototype.emit=function(e){var t,a,s,c,u,o;if(this._events||(this._events={}),"error"===e&&(!this._events.error||n(this._events.error)&&!this._events.error.length)){if((t=arguments[1])instanceof Error)throw t;var h=new Error('Uncaught, unspecified "error" event. 
('+t+")");throw h.context=t,h}if(i(a=this._events[e]))return!1;if(r(a))switch(arguments.length){case 1:a.call(this);break;case 2:a.call(this,arguments[1]);break;case 3:a.call(this,arguments[1],arguments[2]);break;default:c=Array.prototype.slice.call(arguments,1),a.apply(this,c)}else if(n(a))for(c=Array.prototype.slice.call(arguments,1),s=(o=a.slice()).length,u=0;u<s;u++)o[u].apply(this,c);return!0},t.prototype.addListener=function(e,a){var s;if(!r(a))throw TypeError("listener must be a function");return this._events||(this._events={}),this._events.newListener&&this.emit("newListener",e,r(a.listener)?a.listener:a),this._events[e]?n(this._events[e])?this._events[e].push(a):this._events[e]=[this._events[e],a]:this._events[e]=a,n(this._events[e])&&!this._events[e].warned&&(s=i(this._maxListeners)?t.defaultMaxListeners:this._maxListeners)&&s>0&&this._events[e].length>s&&(this._events[e].warned=!0,console.error("(node) warning: possible EventEmitter memory leak detected. %d listeners added. Use emitter.setMaxListeners() to increase limit.",this._events[e].length),"function"==typeof console.trace&&console.trace()),this},t.prototype.on=t.prototype.addListener,t.prototype.once=function(e,t){if(!r(t))throw TypeError("listener must be a function");var n=!1;function i(){this.removeListener(e,i),n||(n=!0,t.apply(this,arguments))}return i.listener=t,this.on(e,i),this},t.prototype.removeListener=function(e,t){var i,a,s,c;if(!r(t))throw TypeError("listener must be a function");if(!this._events||!this._events[e])return this;if(s=(i=this._events[e]).length,a=-1,i===t||r(i.listener)&&i.listener===t)delete this._events[e],this._events.removeListener&&this.emit("removeListener",e,t);else if(n(i)){for(c=s;c-- >0;)if(i[c]===t||i[c].listener&&i[c].listener===t){a=c;break}if(a<0)return this;1===i.length?(i.length=0,delete this._events[e]):i.splice(a,1),this._events.removeListener&&this.emit("removeListener",e,t)}return this},t.prototype.removeAllListeners=function(e){var 
t,n;if(!this._events)return this;if(!this._events.removeListener)return 0===arguments.length?this._events={}:this._events[e]&&delete this._events[e],this;if(0===arguments.length){for(t in this._events)"removeListener"!==t&&this.removeAllListeners(t);return this.removeAllListeners("removeListener"),this._events={},this}if(r(n=this._events[e]))this.removeListener(e,n);else if(n)for(;n.length;)this.removeListener(e,n[n.length-1]);return delete this._events[e],this},t.prototype.listeners=function(e){return this._events&&this._events[e]?r(this._events[e])?[this._events[e]]:this._events[e].slice():[]},t.prototype.listenerCount=function(e){if(this._events){var t=this._events[e];if(r(t))return 1;if(t)return t.length}return 0},t.listenerCount=function(e,t){return e.listenerCount(t)}},4766:(e,t,r)=>{"use strict";var n=r(9374),i=r(7775),a=r(3076);function s(e,t,r){return new n(e,t,r)}s.version=r(5474),s.AlgoliaSearchHelper=n,s.SearchParameters=i,s.SearchResults=a,e.exports=s},8078:(e,t,r)=>{"use strict";var n=r(7331);function i(e,t){this.main=e,this.fn=t,this.lastResults=null}r(4853)(i,n),i.prototype.detach=function(){this.removeAllListeners(),this.main.detachDerivedHelper(this)},i.prototype.getModifiedState=function(e){return this.fn(e)},e.exports=i},2437:(e,t,r)=>{"use strict";var n=r(2344),i=r(9803),a=r(116),s={addRefinement:function(e,t,r){if(s.isRefined(e,t,r))return e;var i=""+r,a=e[t]?e[t].concat(i):[i],c={};return c[t]=a,n({},c,e)},removeRefinement:function(e,t,r){if(void 0===r)return s.clearRefinement(e,(function(e,r){return t===r}));var n=""+r;return s.clearRefinement(e,(function(e,r){return t===r&&n===e}))},toggleRefinement:function(e,t,r){if(void 0===r)throw new Error("toggleRefinement should be used with a value");return s.isRefined(e,t,r)?s.removeRefinement(e,t,r):s.addRefinement(e,t,r)},clearRefinement:function(e,t,r){if(void 0===t)return a(e)?{}:e;if("string"==typeof t)return i(e,[t]);if("function"==typeof t){var n=!1,s=Object.keys(e).reduce((function(i,a){var 
s=e[a]||[],c=s.filter((function(e){return!t(e,a,r)}));return c.length!==s.length&&(n=!0),i[a]=c,i}),{});return n?s:e}},isRefined:function(e,t,r){var n=!!e[t]&&e[t].length>0;if(void 0===r||!n)return n;var i=""+r;return-1!==e[t].indexOf(i)}};e.exports=s},7775:(e,t,r)=>{"use strict";var n=r(185),i=r(2344),a=r(2686),s=r(7888),c=r(8023),u=r(9803),o=r(116),h=r(6801),f=r(2437);function l(e,t){return Array.isArray(e)&&Array.isArray(t)?e.length===t.length&&e.every((function(e,r){return l(t[r],e)})):e===t}function m(e){var t=e?m._parseNumbers(e):{};void 0===t.userToken||h(t.userToken)||console.warn("[algoliasearch-helper] The `userToken` parameter is invalid. This can lead to wrong analytics.\n - Format: [a-zA-Z0-9_-]{1,64}"),this.facets=t.facets||[],this.disjunctiveFacets=t.disjunctiveFacets||[],this.hierarchicalFacets=t.hierarchicalFacets||[],this.facetsRefinements=t.facetsRefinements||{},this.facetsExcludes=t.facetsExcludes||{},this.disjunctiveFacetsRefinements=t.disjunctiveFacetsRefinements||{},this.numericRefinements=t.numericRefinements||{},this.tagRefinements=t.tagRefinements||[],this.hierarchicalFacetsRefinements=t.hierarchicalFacetsRefinements||{};var r=this;Object.keys(t).forEach((function(e){var n=-1!==m.PARAMETERS.indexOf(e),i=void 0!==t[e];!n&&i&&(r[e]=t[e])}))}m.PARAMETERS=Object.keys(new m),m._parseNumbers=function(e){if(e instanceof m)return e;var t={};if(["aroundPrecision","aroundRadius","getRankingInfo","minWordSizefor2Typos","minWordSizefor1Typo","page","maxValuesPerFacet","distinct","minimumAroundRadius","hitsPerPage","minProximity"].forEach((function(r){var n=e[r];if("string"==typeof n){var i=parseFloat(n);t[r]=isNaN(i)?n:i}})),Array.isArray(e.insideBoundingBox)&&(t.insideBoundingBox=e.insideBoundingBox.map((function(e){return Array.isArray(e)?e.map((function(e){return parseFloat(e)})):e}))),e.numericRefinements){var r={};Object.keys(e.numericRefinements).forEach((function(t){var 
n=e.numericRefinements[t]||{};r[t]={},Object.keys(n).forEach((function(e){var i=n[e].map((function(e){return Array.isArray(e)?e.map((function(e){return"string"==typeof e?parseFloat(e):e})):"string"==typeof e?parseFloat(e):e}));r[t][e]=i}))})),t.numericRefinements=r}return n({},e,t)},m.make=function(e){var t=new m(e);return(e.hierarchicalFacets||[]).forEach((function(e){if(e.rootPath){var r=t.getHierarchicalRefinement(e.name);r.length>0&&0!==r[0].indexOf(e.rootPath)&&(t=t.clearRefinements(e.name)),0===(r=t.getHierarchicalRefinement(e.name)).length&&(t=t.toggleHierarchicalFacetRefinement(e.name,e.rootPath))}})),t},m.validate=function(e,t){var r=t||{};return e.tagFilters&&r.tagRefinements&&r.tagRefinements.length>0?new Error("[Tags] Cannot switch from the managed tag API to the advanced API. It is probably an error, if it is really what you want, you should first clear the tags with clearTags method."):e.tagRefinements.length>0&&r.tagFilters?new Error("[Tags] Cannot switch from the advanced tag API to the managed API. It is probably an error, if it is not, you should first clear the tags with clearTags method."):e.numericFilters&&r.numericRefinements&&o(r.numericRefinements)?new Error("[Numeric filters] Can't switch from the advanced to the managed API. It is probably an error, if this is really what you want, you have to first clear the numeric filters."):o(e.numericRefinements)&&r.numericFilters?new Error("[Numeric filters] Can't switch from the managed API to the advanced. 
It is probably an error, if this is really what you want, you have to first clear the numeric filters."):null},m.prototype={constructor:m,clearRefinements:function(e){var t={numericRefinements:this._clearNumericRefinements(e),facetsRefinements:f.clearRefinement(this.facetsRefinements,e,"conjunctiveFacet"),facetsExcludes:f.clearRefinement(this.facetsExcludes,e,"exclude"),disjunctiveFacetsRefinements:f.clearRefinement(this.disjunctiveFacetsRefinements,e,"disjunctiveFacet"),hierarchicalFacetsRefinements:f.clearRefinement(this.hierarchicalFacetsRefinements,e,"hierarchicalFacet")};return t.numericRefinements===this.numericRefinements&&t.facetsRefinements===this.facetsRefinements&&t.facetsExcludes===this.facetsExcludes&&t.disjunctiveFacetsRefinements===this.disjunctiveFacetsRefinements&&t.hierarchicalFacetsRefinements===this.hierarchicalFacetsRefinements?this:this.setQueryParameters(t)},clearTags:function(){return void 0===this.tagFilters&&0===this.tagRefinements.length?this:this.setQueryParameters({tagFilters:void 0,tagRefinements:[]})},setIndex:function(e){return e===this.index?this:this.setQueryParameters({index:e})},setQuery:function(e){return e===this.query?this:this.setQueryParameters({query:e})},setPage:function(e){return e===this.page?this:this.setQueryParameters({page:e})},setFacets:function(e){return this.setQueryParameters({facets:e})},setDisjunctiveFacets:function(e){return this.setQueryParameters({disjunctiveFacets:e})},setHitsPerPage:function(e){return this.hitsPerPage===e?this:this.setQueryParameters({hitsPerPage:e})},setTypoTolerance:function(e){return this.typoTolerance===e?this:this.setQueryParameters({typoTolerance:e})},addNumericRefinement:function(e,t,r){var i=c(r);if(this.isNumericRefined(e,t,i))return this;var a=n({},this.numericRefinements);return a[e]=n({},a[e]),a[e][t]?(a[e][t]=a[e][t].slice(),a[e][t].push(i)):a[e][t]=[i],this.setQueryParameters({numericRefinements:a})},getConjunctiveRefinements:function(e){return 
this.isConjunctiveFacet(e)&&this.facetsRefinements[e]||[]},getDisjunctiveRefinements:function(e){return this.isDisjunctiveFacet(e)&&this.disjunctiveFacetsRefinements[e]||[]},getHierarchicalRefinement:function(e){return this.hierarchicalFacetsRefinements[e]||[]},getExcludeRefinements:function(e){return this.isConjunctiveFacet(e)&&this.facetsExcludes[e]||[]},removeNumericRefinement:function(e,t,r){return void 0!==r?this.isNumericRefined(e,t,r)?this.setQueryParameters({numericRefinements:this._clearNumericRefinements((function(n,i){return i===e&&n.op===t&&l(n.val,c(r))}))}):this:void 0!==t?this.isNumericRefined(e,t)?this.setQueryParameters({numericRefinements:this._clearNumericRefinements((function(r,n){return n===e&&r.op===t}))}):this:this.isNumericRefined(e)?this.setQueryParameters({numericRefinements:this._clearNumericRefinements((function(t,r){return r===e}))}):this},getNumericRefinements:function(e){return this.numericRefinements[e]||{}},getNumericRefinement:function(e,t){return this.numericRefinements[e]&&this.numericRefinements[e][t]},_clearNumericRefinements:function(e){if(void 0===e)return o(this.numericRefinements)?{}:this.numericRefinements;if("string"==typeof e)return u(this.numericRefinements,[e]);if("function"==typeof e){var t=!1,r=this.numericRefinements,n=Object.keys(r).reduce((function(n,i){var a=r[i],s={};return a=a||{},Object.keys(a).forEach((function(r){var n=a[r]||[],c=[];n.forEach((function(t){e({val:t,op:r},i,"numeric")||c.push(t)})),c.length!==n.length&&(t=!0),s[r]=c})),n[i]=s,n}),{});return t?n:this.numericRefinements}},addFacet:function(e){return this.isConjunctiveFacet(e)?this:this.setQueryParameters({facets:this.facets.concat([e])})},addDisjunctiveFacet:function(e){return this.isDisjunctiveFacet(e)?this:this.setQueryParameters({disjunctiveFacets:this.disjunctiveFacets.concat([e])})},addHierarchicalFacet:function(e){if(this.isHierarchicalFacet(e.name))throw new Error("Cannot declare two hierarchical facets with the same name: 
`"+e.name+"`");return this.setQueryParameters({hierarchicalFacets:this.hierarchicalFacets.concat([e])})},addFacetRefinement:function(e,t){if(!this.isConjunctiveFacet(e))throw new Error(e+" is not defined in the facets attribute of the helper configuration");return f.isRefined(this.facetsRefinements,e,t)?this:this.setQueryParameters({facetsRefinements:f.addRefinement(this.facetsRefinements,e,t)})},addExcludeRefinement:function(e,t){if(!this.isConjunctiveFacet(e))throw new Error(e+" is not defined in the facets attribute of the helper configuration");return f.isRefined(this.facetsExcludes,e,t)?this:this.setQueryParameters({facetsExcludes:f.addRefinement(this.facetsExcludes,e,t)})},addDisjunctiveFacetRefinement:function(e,t){if(!this.isDisjunctiveFacet(e))throw new Error(e+" is not defined in the disjunctiveFacets attribute of the helper configuration");return f.isRefined(this.disjunctiveFacetsRefinements,e,t)?this:this.setQueryParameters({disjunctiveFacetsRefinements:f.addRefinement(this.disjunctiveFacetsRefinements,e,t)})},addTagRefinement:function(e){if(this.isTagRefined(e))return this;var t={tagRefinements:this.tagRefinements.concat(e)};return this.setQueryParameters(t)},removeFacet:function(e){return this.isConjunctiveFacet(e)?this.clearRefinements(e).setQueryParameters({facets:this.facets.filter((function(t){return t!==e}))}):this},removeDisjunctiveFacet:function(e){return this.isDisjunctiveFacet(e)?this.clearRefinements(e).setQueryParameters({disjunctiveFacets:this.disjunctiveFacets.filter((function(t){return t!==e}))}):this},removeHierarchicalFacet:function(e){return this.isHierarchicalFacet(e)?this.clearRefinements(e).setQueryParameters({hierarchicalFacets:this.hierarchicalFacets.filter((function(t){return t.name!==e}))}):this},removeFacetRefinement:function(e,t){if(!this.isConjunctiveFacet(e))throw new Error(e+" is not defined in the facets attribute of the helper configuration");return 
f.isRefined(this.facetsRefinements,e,t)?this.setQueryParameters({facetsRefinements:f.removeRefinement(this.facetsRefinements,e,t)}):this},removeExcludeRefinement:function(e,t){if(!this.isConjunctiveFacet(e))throw new Error(e+" is not defined in the facets attribute of the helper configuration");return f.isRefined(this.facetsExcludes,e,t)?this.setQueryParameters({facetsExcludes:f.removeRefinement(this.facetsExcludes,e,t)}):this},removeDisjunctiveFacetRefinement:function(e,t){if(!this.isDisjunctiveFacet(e))throw new Error(e+" is not defined in the disjunctiveFacets attribute of the helper configuration");return f.isRefined(this.disjunctiveFacetsRefinements,e,t)?this.setQueryParameters({disjunctiveFacetsRefinements:f.removeRefinement(this.disjunctiveFacetsRefinements,e,t)}):this},removeTagRefinement:function(e){if(!this.isTagRefined(e))return this;var t={tagRefinements:this.tagRefinements.filter((function(t){return t!==e}))};return this.setQueryParameters(t)},toggleRefinement:function(e,t){return this.toggleFacetRefinement(e,t)},toggleFacetRefinement:function(e,t){if(this.isHierarchicalFacet(e))return this.toggleHierarchicalFacetRefinement(e,t);if(this.isConjunctiveFacet(e))return this.toggleConjunctiveFacetRefinement(e,t);if(this.isDisjunctiveFacet(e))return this.toggleDisjunctiveFacetRefinement(e,t);throw new Error("Cannot refine the undeclared facet "+e+"; it should be added to the helper options facets, disjunctiveFacets or hierarchicalFacets")},toggleConjunctiveFacetRefinement:function(e,t){if(!this.isConjunctiveFacet(e))throw new Error(e+" is not defined in the facets attribute of the helper configuration");return this.setQueryParameters({facetsRefinements:f.toggleRefinement(this.facetsRefinements,e,t)})},toggleExcludeFacetRefinement:function(e,t){if(!this.isConjunctiveFacet(e))throw new Error(e+" is not defined in the facets attribute of the helper configuration");return 
this.setQueryParameters({facetsExcludes:f.toggleRefinement(this.facetsExcludes,e,t)})},toggleDisjunctiveFacetRefinement:function(e,t){if(!this.isDisjunctiveFacet(e))throw new Error(e+" is not defined in the disjunctiveFacets attribute of the helper configuration");return this.setQueryParameters({disjunctiveFacetsRefinements:f.toggleRefinement(this.disjunctiveFacetsRefinements,e,t)})},toggleHierarchicalFacetRefinement:function(e,t){if(!this.isHierarchicalFacet(e))throw new Error(e+" is not defined in the hierarchicalFacets attribute of the helper configuration");var r=this._getHierarchicalFacetSeparator(this.getHierarchicalFacetByName(e)),n={};return void 0!==this.hierarchicalFacetsRefinements[e]&&this.hierarchicalFacetsRefinements[e].length>0&&(this.hierarchicalFacetsRefinements[e][0]===t||0===this.hierarchicalFacetsRefinements[e][0].indexOf(t+r))?-1===t.indexOf(r)?n[e]=[]:n[e]=[t.slice(0,t.lastIndexOf(r))]:n[e]=[t],this.setQueryParameters({hierarchicalFacetsRefinements:i({},n,this.hierarchicalFacetsRefinements)})},addHierarchicalFacetRefinement:function(e,t){if(this.isHierarchicalFacetRefined(e))throw new Error(e+" is already refined.");if(!this.isHierarchicalFacet(e))throw new Error(e+" is not defined in the hierarchicalFacets attribute of the helper configuration.");var r={};return r[e]=[t],this.setQueryParameters({hierarchicalFacetsRefinements:i({},r,this.hierarchicalFacetsRefinements)})},removeHierarchicalFacetRefinement:function(e){if(!this.isHierarchicalFacetRefined(e))return this;var t={};return t[e]=[],this.setQueryParameters({hierarchicalFacetsRefinements:i({},t,this.hierarchicalFacetsRefinements)})},toggleTagRefinement:function(e){return this.isTagRefined(e)?this.removeTagRefinement(e):this.addTagRefinement(e)},isDisjunctiveFacet:function(e){return this.disjunctiveFacets.indexOf(e)>-1},isHierarchicalFacet:function(e){return void 0!==this.getHierarchicalFacetByName(e)},isConjunctiveFacet:function(e){return 
this.facets.indexOf(e)>-1},isFacetRefined:function(e,t){return!!this.isConjunctiveFacet(e)&&f.isRefined(this.facetsRefinements,e,t)},isExcludeRefined:function(e,t){return!!this.isConjunctiveFacet(e)&&f.isRefined(this.facetsExcludes,e,t)},isDisjunctiveFacetRefined:function(e,t){return!!this.isDisjunctiveFacet(e)&&f.isRefined(this.disjunctiveFacetsRefinements,e,t)},isHierarchicalFacetRefined:function(e,t){if(!this.isHierarchicalFacet(e))return!1;var r=this.getHierarchicalRefinement(e);return t?-1!==r.indexOf(t):r.length>0},isNumericRefined:function(e,t,r){if(void 0===r&&void 0===t)return!!this.numericRefinements[e];var n=this.numericRefinements[e]&&void 0!==this.numericRefinements[e][t];if(void 0===r||!n)return n;var i,a,u=c(r),o=void 0!==(i=this.numericRefinements[e][t],a=u,s(i,(function(e){return l(e,a)})));return n&&o},isTagRefined:function(e){return-1!==this.tagRefinements.indexOf(e)},getRefinedDisjunctiveFacets:function(){var e=this,t=a(Object.keys(this.numericRefinements).filter((function(t){return Object.keys(e.numericRefinements[t]).length>0})),this.disjunctiveFacets);return Object.keys(this.disjunctiveFacetsRefinements).filter((function(t){return e.disjunctiveFacetsRefinements[t].length>0})).concat(t).concat(this.getRefinedHierarchicalFacets())},getRefinedHierarchicalFacets:function(){var e=this;return a(this.hierarchicalFacets.map((function(e){return e.name})),Object.keys(this.hierarchicalFacetsRefinements).filter((function(t){return e.hierarchicalFacetsRefinements[t].length>0})))},getUnrefinedDisjunctiveFacets:function(){var e=this.getRefinedDisjunctiveFacets();return this.disjunctiveFacets.filter((function(t){return-1===e.indexOf(t)}))},managedParameters:["index","facets","disjunctiveFacets","facetsRefinements","hierarchicalFacets","facetsExcludes","disjunctiveFacetsRefinements","numericRefinements","tagRefinements","hierarchicalFacetsRefinements"],getQueryParams:function(){var e=this.managedParameters,t={},r=this;return 
Object.keys(this).forEach((function(n){var i=r[n];-1===e.indexOf(n)&&void 0!==i&&(t[n]=i)})),t},setQueryParameter:function(e,t){if(this[e]===t)return this;var r={};return r[e]=t,this.setQueryParameters(r)},setQueryParameters:function(e){if(!e)return this;var t=m.validate(this,e);if(t)throw t;var r=this,n=m._parseNumbers(e),i=Object.keys(this).reduce((function(e,t){return e[t]=r[t],e}),{}),a=Object.keys(n).reduce((function(e,t){var r=void 0!==e[t],i=void 0!==n[t];return r&&!i?u(e,[t]):(i&&(e[t]=n[t]),e)}),i);return new this.constructor(a)},resetPage:function(){return void 0===this.page?this:this.setPage(0)},_getHierarchicalFacetSortBy:function(e){return e.sortBy||["isRefined:desc","name:asc"]},_getHierarchicalFacetSeparator:function(e){return e.separator||" > "},_getHierarchicalRootPath:function(e){return e.rootPath||null},_getHierarchicalShowParentLevel:function(e){return"boolean"!=typeof e.showParentLevel||e.showParentLevel},getHierarchicalFacetByName:function(e){return s(this.hierarchicalFacets,(function(t){return t.name===e}))},getHierarchicalFacetBreadcrumb:function(e){if(!this.isHierarchicalFacet(e))return[];var t=this.getHierarchicalRefinement(e)[0];if(!t)return[];var r=this._getHierarchicalFacetSeparator(this.getHierarchicalFacetByName(e));return t.split(r).map((function(e){return e.trim()}))},toString:function(){return JSON.stringify(this,null,2)}},e.exports=m},210:(e,t,r)=>{"use strict";e.exports=function(e){return function(t,r){var s=e.hierarchicalFacets[r],o=e.hierarchicalFacetsRefinements[s.name]&&e.hierarchicalFacetsRefinements[s.name][0]||"",h=e._getHierarchicalFacetSeparator(s),f=e._getHierarchicalRootPath(s),l=e._getHierarchicalShowParentLevel(s),m=a(e._getHierarchicalFacetSortBy(s)),d=t.every((function(e){return e.exhaustive})),p=function(e,t,r,a,s){return function(o,h,f){var l=o;if(f>0){var m=0;for(l=o;m<f;){var d=l&&Array.isArray(l.data)?l.data:[];l=i(d,(function(e){return e.isRefined})),m++}}if(l){var 
p=Object.keys(h.data).map((function(e){return[e,h.data[e]]})).filter((function(e){return function(e,t,r,n,i,a){if(i&&(0!==e.indexOf(i)||i===e))return!1;return!i&&-1===e.indexOf(n)||i&&e.split(n).length-i.split(n).length==1||-1===e.indexOf(n)&&-1===r.indexOf(n)||0===r.indexOf(e)||0===e.indexOf(t+n)&&(a||0===e.indexOf(r))}(e[0],l.path||r,s,t,r,a)}));l.data=n(p.map((function(e){var r=e[0];return function(e,t,r,n,i){var a=t.split(r);return{name:a[a.length-1].trim(),path:t,escapedValue:c(t),count:e,isRefined:n===t||0===n.indexOf(t+r),exhaustive:i,data:null}}(e[1],r,t,u(s),h.exhaustive)})),e[0],e[1])}return o}}(m,h,f,l,o),v=t;return f&&(v=t.slice(f.split(h).length)),v.reduce(p,{name:e.hierarchicalFacets[r].name,count:null,isRefined:!0,path:null,escapedValue:null,exhaustive:d,data:null})}};var n=r(2148),i=r(7888),a=r(2293),s=r(4039),c=s.escapeFacetValue,u=s.unescapeFacetValue},3076:(e,t,r)=>{"use strict";var n=r(185),i=r(2344),a=r(2148),s=r(4587),c=r(7888),u=r(9725),o=r(2293),h=r(4039),f=h.escapeFacetValue,l=h.unescapeFacetValue,m=r(210);function d(e){var t={};return e.forEach((function(e,r){t[e]=r})),t}function p(e,t,r){t&&t[r]&&(e.stats=t[r])}function v(e,t,r){var a=t[0];this._rawResults=t;var o=this;Object.keys(a).forEach((function(e){o[e]=a[e]})),Object.keys(r||{}).forEach((function(e){o[e]=r[e]})),this.processingTimeMS=t.reduce((function(e,t){return void 0===t.processingTimeMS?e:e+t.processingTimeMS}),0),this.disjunctiveFacets=[],this.hierarchicalFacets=e.hierarchicalFacets.map((function(){return[]})),this.facets=[];var h=e.getRefinedDisjunctiveFacets(),f=d(e.facets),v=d(e.disjunctiveFacets),g=1,y=a.facets||{};Object.keys(y).forEach((function(t){var r,n,i=y[t],s=(r=e.hierarchicalFacets,n=t,c(r,(function(e){return(e.attributes||[]).indexOf(n)>-1})));if(s){var h=s.attributes.indexOf(t),l=u(e.hierarchicalFacets,(function(e){return e.name===s.name}));o.hierarchicalFacets[l][h]={attribute:t,data:i,exhaustive:a.exhaustiveFacetsCount}}else{var 
m,d=-1!==e.disjunctiveFacets.indexOf(t),g=-1!==e.facets.indexOf(t);d&&(m=v[t],o.disjunctiveFacets[m]={name:t,data:i,exhaustive:a.exhaustiveFacetsCount},p(o.disjunctiveFacets[m],a.facets_stats,t)),g&&(m=f[t],o.facets[m]={name:t,data:i,exhaustive:a.exhaustiveFacetsCount},p(o.facets[m],a.facets_stats,t))}})),this.hierarchicalFacets=s(this.hierarchicalFacets),h.forEach((function(r){var s=t[g],c=s&&s.facets?s.facets:{},h=e.getHierarchicalFacetByName(r);Object.keys(c).forEach((function(t){var r,f=c[t];if(h){r=u(e.hierarchicalFacets,(function(e){return e.name===h.name}));var m=u(o.hierarchicalFacets[r],(function(e){return e.attribute===t}));if(-1===m)return;o.hierarchicalFacets[r][m].data=n({},o.hierarchicalFacets[r][m].data,f)}else{r=v[t];var d=a.facets&&a.facets[t]||{};o.disjunctiveFacets[r]={name:t,data:i({},f,d),exhaustive:s.exhaustiveFacetsCount},p(o.disjunctiveFacets[r],s.facets_stats,t),e.disjunctiveFacetsRefinements[t]&&e.disjunctiveFacetsRefinements[t].forEach((function(n){!o.disjunctiveFacets[r].data[n]&&e.disjunctiveFacetsRefinements[t].indexOf(l(n))>-1&&(o.disjunctiveFacets[r].data[n]=0)}))}})),g++})),e.getRefinedHierarchicalFacets().forEach((function(r){var n=e.getHierarchicalFacetByName(r),a=e._getHierarchicalFacetSeparator(n),s=e.getHierarchicalRefinement(r);0===s.length||s[0].split(a).length<2||t.slice(g).forEach((function(t){var r=t&&t.facets?t.facets:{};Object.keys(r).forEach((function(t){var c=r[t],h=u(e.hierarchicalFacets,(function(e){return e.name===n.name})),f=u(o.hierarchicalFacets[h],(function(e){return e.attribute===t}));if(-1!==f){var l={};if(s.length>0){var m=s[0].split(a)[0];l[m]=o.hierarchicalFacets[h][f].data[m]}o.hierarchicalFacets[h][f].data=i(l,c,o.hierarchicalFacets[h][f].data)}})),g++}))})),Object.keys(e.facetsExcludes).forEach((function(t){var 
r=e.facetsExcludes[t],n=f[t];o.facets[n]={name:t,data:a.facets[t],exhaustive:a.exhaustiveFacetsCount},r.forEach((function(e){o.facets[n]=o.facets[n]||{name:t},o.facets[n].data=o.facets[n].data||{},o.facets[n].data[e]=0}))})),this.hierarchicalFacets=this.hierarchicalFacets.map(m(e)),this.facets=s(this.facets),this.disjunctiveFacets=s(this.disjunctiveFacets),this._state=e}function g(e,t){function r(e){return e.name===t}if(e._state.isConjunctiveFacet(t)){var n=c(e.facets,r);return n?Object.keys(n.data).map((function(r){var i=f(r);return{name:r,escapedValue:i,count:n.data[r],isRefined:e._state.isFacetRefined(t,i),isExcluded:e._state.isExcludeRefined(t,r)}})):[]}if(e._state.isDisjunctiveFacet(t)){var i=c(e.disjunctiveFacets,r);return i?Object.keys(i.data).map((function(r){var n=f(r);return{name:r,escapedValue:n,count:i.data[r],isRefined:e._state.isDisjunctiveFacetRefined(t,n)}})):[]}if(e._state.isHierarchicalFacet(t)){var a=c(e.hierarchicalFacets,r);if(!a)return a;var s=e._state.getHierarchicalFacetByName(t),u=l(e._state.getHierarchicalRefinement(t)[0]||"").split(e._state._getHierarchicalFacetSeparator(s));return u.unshift(t),y(a,u,0),a}}function y(e,t,r){e.isRefined=e.name===t[r],e.data&&e.data.forEach((function(e){y(e,t,r+1)}))}function R(e,t,r,n){if(n=n||0,Array.isArray(t))return e(t,r[n]);if(!t.data||0===t.data.length)return t;var a=t.data.map((function(t){return R(e,t,r,n+1)})),s=e(a,r[n]);return i({data:s},t)}function F(e,t){var r=c(e,(function(e){return e.name===t}));return r&&r.stats}function b(e,t,r,n,i){var a=c(i,(function(e){return e.name===r})),s=a&&a.data&&a.data[n]?a.data[n]:0,u=a&&a.exhaustive||!1;return{type:t,attributeName:r,name:n,count:s,exhaustive:u}}v.prototype.getFacetByName=function(e){function t(t){return t.name===e}return c(this.facets,t)||c(this.disjunctiveFacets,t)||c(this.hierarchicalFacets,t)},v.DEFAULT_SORT=["isRefined:desc","count:desc","name:asc"],v.prototype.getFacetValues=function(e,t){var r=g(this,e);if(r){var 
n,s=i({},t,{sortBy:v.DEFAULT_SORT,facetOrdering:!(t&&t.sortBy)}),c=this;if(Array.isArray(r))n=[e];else n=c._state.getHierarchicalFacetByName(r.name).attributes;return R((function(e,t){if(s.facetOrdering){var r=function(e,t){return e.renderingContent&&e.renderingContent.facetOrdering&&e.renderingContent.facetOrdering.values&&e.renderingContent.facetOrdering.values[t]}(c,t);if(Boolean(r))return function(e,t){var r=[],n=[],i=(t.order||[]).reduce((function(e,t,r){return e[t]=r,e}),{});e.forEach((function(e){var t=e.path||e.name;void 0!==i[t]?r[i[t]]=e:n.push(e)})),r=r.filter((function(e){return e}));var s,c=t.sortRemainingBy;return"hidden"===c?r:(s="alpha"===c?[["path","name"],["asc","asc"]]:[["count"],["desc"]],r.concat(a(n,s[0],s[1])))}(e,r)}if(Array.isArray(s.sortBy)){var n=o(s.sortBy,v.DEFAULT_SORT);return a(e,n[0],n[1])}if("function"==typeof s.sortBy)return function(e,t){return t.sort(e)}(s.sortBy,e);throw new Error("options.sortBy is optional but if defined it must be either an array of string (predicates) or a sorting function")}),r,n)}},v.prototype.getFacetStats=function(e){return this._state.isConjunctiveFacet(e)?F(this.facets,e):this._state.isDisjunctiveFacet(e)?F(this.disjunctiveFacets,e):void 0},v.prototype.getRefinements=function(){var e=this._state,t=this,r=[];return Object.keys(e.facetsRefinements).forEach((function(n){e.facetsRefinements[n].forEach((function(i){r.push(b(e,"facet",n,i,t.facets))}))})),Object.keys(e.facetsExcludes).forEach((function(n){e.facetsExcludes[n].forEach((function(i){r.push(b(e,"exclude",n,i,t.facets))}))})),Object.keys(e.disjunctiveFacetsRefinements).forEach((function(n){e.disjunctiveFacetsRefinements[n].forEach((function(i){r.push(b(e,"disjunctive",n,i,t.disjunctiveFacets))}))})),Object.keys(e.hierarchicalFacetsRefinements).forEach((function(n){e.hierarchicalFacetsRefinements[n].forEach((function(i){r.push(function(e,t,r,n){var 
i=e.getHierarchicalFacetByName(t),a=e._getHierarchicalFacetSeparator(i),s=r.split(a),u=c(n,(function(e){return e.name===t})),o=s.reduce((function(e,t){var r=e&&c(e.data,(function(e){return e.name===t}));return void 0!==r?r:e}),u),h=o&&o.count||0,f=o&&o.exhaustive||!1,l=o&&o.path||"";return{type:"hierarchical",attributeName:t,name:l,count:h,exhaustive:f}}(e,n,i,t.hierarchicalFacets))}))})),Object.keys(e.numericRefinements).forEach((function(t){var n=e.numericRefinements[t];Object.keys(n).forEach((function(e){n[e].forEach((function(n){r.push({type:"numeric",attributeName:t,name:n,numericValue:n,operator:e})}))}))})),e.tagRefinements.forEach((function(e){r.push({type:"tag",attributeName:"_tags",name:e})})),r},e.exports=v},9374:(e,t,r)=>{"use strict";var n=r(7775),i=r(3076),a=r(8078),s=r(6394),c=r(7331),u=r(4853),o=r(116),h=r(9803),f=r(185),l=r(5474),m=r(4039).escapeFacetValue;function d(e,t,r){"function"==typeof e.addAlgoliaAgent&&e.addAlgoliaAgent("JS Helper ("+l+")"),this.setClient(e);var i=r||{};i.index=t,this.state=n.make(i),this.lastResults=null,this._queryId=0,this._lastQueryIdReceived=-1,this.derivedHelpers=[],this._currentNbQueries=0}function p(e){if(e<0)throw new Error("Page requested below 0.");return this._change({state:this.state.setPage(e),isPageReset:!1}),this}function v(){return this.state.page}u(d,c),d.prototype.search=function(){return this._search({onlyWithDerivedHelpers:!1}),this},d.prototype.searchOnlyWithDerivedHelpers=function(){return this._search({onlyWithDerivedHelpers:!0}),this},d.prototype.getQuery=function(){var e=this.state;return s._getHitsSearchParams(e)},d.prototype.searchOnce=function(e,t){var r=e?this.state.setQueryParameters(e):this.state,n=s._getQueries(r.index,r),a=this;if(this._currentNbQueries++,this.emit("searchOnce",{state:r}),!t)return this.client.search(n).then((function(e){return a._currentNbQueries--,0===a._currentNbQueries&&a.emit("searchQueueEmpty"),{content:new 
i(r,e.results),state:r,_originalResponse:e}}),(function(e){throw a._currentNbQueries--,0===a._currentNbQueries&&a.emit("searchQueueEmpty"),e}));this.client.search(n).then((function(e){a._currentNbQueries--,0===a._currentNbQueries&&a.emit("searchQueueEmpty"),t(null,new i(r,e.results),r)})).catch((function(e){a._currentNbQueries--,0===a._currentNbQueries&&a.emit("searchQueueEmpty"),t(e,null,r)}))},d.prototype.findAnswers=function(e){console.warn("[algoliasearch-helper] answers is no longer supported");var t=this.state,r=this.derivedHelpers[0];if(!r)return Promise.resolve([]);var n=r.getModifiedState(t),i=f({attributesForPrediction:e.attributesForPrediction,nbHits:e.nbHits},{params:h(s._getHitsSearchParams(n),["attributesToSnippet","hitsPerPage","restrictSearchableAttributes","snippetEllipsisText"])}),a="search for answers was called, but this client does not have a function client.initIndex(index).findAnswers";if("function"!=typeof this.client.initIndex)throw new Error(a);var c=this.client.initIndex(n.index);if("function"!=typeof c.findAnswers)throw new Error(a);return c.findAnswers(n.query,e.queryLanguages,i)},d.prototype.searchForFacetValues=function(e,t,r,n){var i="function"==typeof this.client.searchForFacetValues,a="function"==typeof this.client.initIndex;if(!i&&!a&&"function"!=typeof this.client.search)throw new Error("search for facet values (searchable) was called, but this client does not have a function client.searchForFacetValues or client.initIndex(index).searchForFacetValues");var c=this.state.setQueryParameters(n||{}),u=c.isDisjunctiveFacet(e),o=s.getSearchForFacetQuery(e,t,r,c);this._currentNbQueries++;var h,f=this;return i?h=this.client.searchForFacetValues([{indexName:c.index,params:o}]):a?h=this.client.initIndex(c.index).searchForFacetValues(o):(delete o.facetName,h=this.client.search([{type:"facet",facet:e,indexName:c.index,params:o}]).then((function(e){return 
e.results[0]}))),this.emit("searchForFacetValues",{state:c,facet:e,query:t}),h.then((function(t){return f._currentNbQueries--,0===f._currentNbQueries&&f.emit("searchQueueEmpty"),(t=Array.isArray(t)?t[0]:t).facetHits.forEach((function(t){t.escapedValue=m(t.value),t.isRefined=u?c.isDisjunctiveFacetRefined(e,t.escapedValue):c.isFacetRefined(e,t.escapedValue)})),t}),(function(e){throw f._currentNbQueries--,0===f._currentNbQueries&&f.emit("searchQueueEmpty"),e}))},d.prototype.setQuery=function(e){return this._change({state:this.state.resetPage().setQuery(e),isPageReset:!0}),this},d.prototype.clearRefinements=function(e){return this._change({state:this.state.resetPage().clearRefinements(e),isPageReset:!0}),this},d.prototype.clearTags=function(){return this._change({state:this.state.resetPage().clearTags(),isPageReset:!0}),this},d.prototype.addDisjunctiveFacetRefinement=function(e,t){return this._change({state:this.state.resetPage().addDisjunctiveFacetRefinement(e,t),isPageReset:!0}),this},d.prototype.addDisjunctiveRefine=function(){return this.addDisjunctiveFacetRefinement.apply(this,arguments)},d.prototype.addHierarchicalFacetRefinement=function(e,t){return this._change({state:this.state.resetPage().addHierarchicalFacetRefinement(e,t),isPageReset:!0}),this},d.prototype.addNumericRefinement=function(e,t,r){return this._change({state:this.state.resetPage().addNumericRefinement(e,t,r),isPageReset:!0}),this},d.prototype.addFacetRefinement=function(e,t){return this._change({state:this.state.resetPage().addFacetRefinement(e,t),isPageReset:!0}),this},d.prototype.addRefine=function(){return this.addFacetRefinement.apply(this,arguments)},d.prototype.addFacetExclusion=function(e,t){return this._change({state:this.state.resetPage().addExcludeRefinement(e,t),isPageReset:!0}),this},d.prototype.addExclude=function(){return this.addFacetExclusion.apply(this,arguments)},d.prototype.addTag=function(e){return 
this._change({state:this.state.resetPage().addTagRefinement(e),isPageReset:!0}),this},d.prototype.removeNumericRefinement=function(e,t,r){return this._change({state:this.state.resetPage().removeNumericRefinement(e,t,r),isPageReset:!0}),this},d.prototype.removeDisjunctiveFacetRefinement=function(e,t){return this._change({state:this.state.resetPage().removeDisjunctiveFacetRefinement(e,t),isPageReset:!0}),this},d.prototype.removeDisjunctiveRefine=function(){return this.removeDisjunctiveFacetRefinement.apply(this,arguments)},d.prototype.removeHierarchicalFacetRefinement=function(e){return this._change({state:this.state.resetPage().removeHierarchicalFacetRefinement(e),isPageReset:!0}),this},d.prototype.removeFacetRefinement=function(e,t){return this._change({state:this.state.resetPage().removeFacetRefinement(e,t),isPageReset:!0}),this},d.prototype.removeRefine=function(){return this.removeFacetRefinement.apply(this,arguments)},d.prototype.removeFacetExclusion=function(e,t){return this._change({state:this.state.resetPage().removeExcludeRefinement(e,t),isPageReset:!0}),this},d.prototype.removeExclude=function(){return this.removeFacetExclusion.apply(this,arguments)},d.prototype.removeTag=function(e){return this._change({state:this.state.resetPage().removeTagRefinement(e),isPageReset:!0}),this},d.prototype.toggleFacetExclusion=function(e,t){return this._change({state:this.state.resetPage().toggleExcludeFacetRefinement(e,t),isPageReset:!0}),this},d.prototype.toggleExclude=function(){return this.toggleFacetExclusion.apply(this,arguments)},d.prototype.toggleRefinement=function(e,t){return this.toggleFacetRefinement(e,t)},d.prototype.toggleFacetRefinement=function(e,t){return this._change({state:this.state.resetPage().toggleFacetRefinement(e,t),isPageReset:!0}),this},d.prototype.toggleRefine=function(){return this.toggleFacetRefinement.apply(this,arguments)},d.prototype.toggleTag=function(e){return 
this._change({state:this.state.resetPage().toggleTagRefinement(e),isPageReset:!0}),this},d.prototype.nextPage=function(){var e=this.state.page||0;return this.setPage(e+1)},d.prototype.previousPage=function(){var e=this.state.page||0;return this.setPage(e-1)},d.prototype.setCurrentPage=p,d.prototype.setPage=p,d.prototype.setIndex=function(e){return this._change({state:this.state.resetPage().setIndex(e),isPageReset:!0}),this},d.prototype.setQueryParameter=function(e,t){return this._change({state:this.state.resetPage().setQueryParameter(e,t),isPageReset:!0}),this},d.prototype.setState=function(e){return this._change({state:n.make(e),isPageReset:!1}),this},d.prototype.overrideStateWithoutTriggeringChangeEvent=function(e){return this.state=new n(e),this},d.prototype.hasRefinements=function(e){return!!o(this.state.getNumericRefinements(e))||(this.state.isConjunctiveFacet(e)?this.state.isFacetRefined(e):this.state.isDisjunctiveFacet(e)?this.state.isDisjunctiveFacetRefined(e):!!this.state.isHierarchicalFacet(e)&&this.state.isHierarchicalFacetRefined(e))},d.prototype.isExcluded=function(e,t){return this.state.isExcludeRefined(e,t)},d.prototype.isDisjunctiveRefined=function(e,t){return this.state.isDisjunctiveFacetRefined(e,t)},d.prototype.hasTag=function(e){return this.state.isTagRefined(e)},d.prototype.isTagRefined=function(){return this.hasTagRefinements.apply(this,arguments)},d.prototype.getIndex=function(){return this.state.index},d.prototype.getCurrentPage=v,d.prototype.getPage=v,d.prototype.getTags=function(){return this.state.tagRefinements},d.prototype.getRefinements=function(e){var t=[];if(this.state.isConjunctiveFacet(e))this.state.getConjunctiveRefinements(e).forEach((function(e){t.push({value:e,type:"conjunctive"})})),this.state.getExcludeRefinements(e).forEach((function(e){t.push({value:e,type:"exclude"})}));else if(this.state.isDisjunctiveFacet(e)){this.state.getDisjunctiveRefinements(e).forEach((function(e){t.push({value:e,type:"disjunctive"})}))}var 
r=this.state.getNumericRefinements(e);return Object.keys(r).forEach((function(e){var n=r[e];t.push({value:n,operator:e,type:"numeric"})})),t},d.prototype.getNumericRefinement=function(e,t){return this.state.getNumericRefinement(e,t)},d.prototype.getHierarchicalFacetBreadcrumb=function(e){return this.state.getHierarchicalFacetBreadcrumb(e)},d.prototype._search=function(e){var t=this.state,r=[],n=[];e.onlyWithDerivedHelpers||(n=s._getQueries(t.index,t),r.push({state:t,queriesCount:n.length,helper:this}),this.emit("search",{state:t,results:this.lastResults}));var i=this.derivedHelpers.map((function(e){var n=e.getModifiedState(t),i=s._getQueries(n.index,n);return r.push({state:n,queriesCount:i.length,helper:e}),e.emit("search",{state:n,results:e.lastResults}),i})),a=Array.prototype.concat.apply(n,i),c=this._queryId++;this._currentNbQueries++;try{this.client.search(a).then(this._dispatchAlgoliaResponse.bind(this,r,c)).catch(this._dispatchAlgoliaError.bind(this,c))}catch(u){this.emit("error",{error:u})}},d.prototype._dispatchAlgoliaResponse=function(e,t,r){if(!(t<this._lastQueryIdReceived)){this._currentNbQueries-=t-this._lastQueryIdReceived,this._lastQueryIdReceived=t,0===this._currentNbQueries&&this.emit("searchQueueEmpty");var n=r.results.slice();e.forEach((function(e){var t=e.state,r=e.queriesCount,a=e.helper,s=n.splice(0,r),c=a.lastResults=new i(t,s);a.emit("result",{results:c,state:t})}))}},d.prototype._dispatchAlgoliaError=function(e,t){e<this._lastQueryIdReceived||(this._currentNbQueries-=e-this._lastQueryIdReceived,this._lastQueryIdReceived=e,this.emit("error",{error:t}),0===this._currentNbQueries&&this.emit("searchQueueEmpty"))},d.prototype.containsRefinement=function(e,t,r,n){return e||0!==t.length||0!==r.length||0!==n.length},d.prototype._hasDisjunctiveRefinements=function(e){return this.state.disjunctiveRefinements[e]&&this.state.disjunctiveRefinements[e].length>0},d.prototype._change=function(e){var 
t=e.state,r=e.isPageReset;t!==this.state&&(this.state=t,this.emit("change",{state:this.state,results:this.lastResults,isPageReset:r}))},d.prototype.clearCache=function(){return this.client.clearCache&&this.client.clearCache(),this},d.prototype.setClient=function(e){return this.client===e||("function"==typeof e.addAlgoliaAgent&&e.addAlgoliaAgent("JS Helper ("+l+")"),this.client=e),this},d.prototype.getClient=function(){return this.client},d.prototype.derive=function(e){var t=new a(this,e);return this.derivedHelpers.push(t),t},d.prototype.detachDerivedHelper=function(e){var t=this.derivedHelpers.indexOf(e);if(-1===t)throw new Error("Derived helper already detached");this.derivedHelpers.splice(t,1)},d.prototype.hasPendingRequests=function(){return this._currentNbQueries>0},e.exports=d},4587:e=>{"use strict";e.exports=function(e){return Array.isArray(e)?e.filter(Boolean):[]}},2344:e=>{"use strict";e.exports=function(){return Array.prototype.slice.call(arguments).reduceRight((function(e,t){return Object.keys(Object(t)).forEach((function(r){void 0!==t[r]&&(void 0!==e[r]&&delete e[r],e[r]=t[r])})),e}),{})}},4039:e=>{"use strict";e.exports={escapeFacetValue:function(e){return"string"!=typeof e?e:String(e).replace(/^-/,"\\-")},unescapeFacetValue:function(e){return"string"!=typeof e?e:e.replace(/^\\-/,"-")}}},7888:e=>{"use strict";e.exports=function(e,t){if(Array.isArray(e))for(var r=0;r<e.length;r++)if(t(e[r]))return e[r]}},9725:e=>{"use strict";e.exports=function(e,t){if(!Array.isArray(e))return-1;for(var r=0;r<e.length;r++)if(t(e[r]))return r;return-1}},2293:(e,t,r)=>{"use strict";var n=r(7888);e.exports=function(e,t){var r=(t||[]).map((function(e){return e.split(":")}));return e.reduce((function(e,t){var i=t.split(":"),a=n(r,(function(e){return e[0]===i[0]}));return i.length>1||!a?(e[0].push(i[0]),e[1].push(i[1]),e):(e[0].push(a[0]),e[1].push(a[1]),e)}),[[],[]])}},4853:e=>{"use 
strict";e.exports=function(e,t){e.prototype=Object.create(t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}})}},2686:e=>{"use strict";e.exports=function(e,t){return e.filter((function(r,n){return t.indexOf(r)>-1&&e.indexOf(r)===n}))}},185:e=>{"use strict";function t(e){return"function"==typeof e||Array.isArray(e)||"[object Object]"===Object.prototype.toString.call(e)}function r(e,n){if(e===n)return e;for(var i in n)if(Object.prototype.hasOwnProperty.call(n,i)&&"__proto__"!==i&&"constructor"!==i){var a=n[i],s=e[i];void 0!==s&&void 0===a||(t(s)&&t(a)?e[i]=r(s,a):e[i]="object"==typeof(c=a)&&null!==c?r(Array.isArray(c)?[]:{},c):c)}var c;return e}e.exports=function(e){t(e)||(e={});for(var n=1,i=arguments.length;n<i;n++){var a=arguments[n];t(a)&&r(e,a)}return e}},116:e=>{"use strict";e.exports=function(e){return e&&Object.keys(e).length>0}},9803:e=>{"use strict";e.exports=function(e,t){if(null===e)return{};var r,n,i={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(i[r]=e[r]);return i}},2148:e=>{"use strict";function t(e,t){if(e!==t){var r=void 0!==e,n=null===e,i=void 0!==t,a=null===t;if(!a&&e>t||n&&i||!r)return 1;if(!n&&e<t||a&&r||!i)return-1}return 0}e.exports=function(e,r,n){if(!Array.isArray(e))return[];Array.isArray(n)||(n=[]);var i=e.map((function(e,t){return{criteria:r.map((function(t){return e[t]})),index:t,value:e}}));return i.sort((function(e,r){for(var i=-1;++i<e.criteria.length;){var a=t(e.criteria[i],r.criteria[i]);if(a)return i>=n.length?a:"desc"===n[i]?-a:a}return e.index-r.index})),i.map((function(e){return e.value}))}},8023:e=>{"use strict";e.exports=function e(t){if("number"==typeof t)return t;if("string"==typeof t)return parseFloat(t);if(Array.isArray(t))return t.map(e);throw new Error("The value should be a number, a parsable string or an array of those.")}},6394:(e,t,r)=>{"use strict";var n=r(185);function i(e){return Object.keys(e).sort((function(e,t){return 
e.localeCompare(t)})).reduce((function(t,r){return t[r]=e[r],t}),{})}var a={_getQueries:function(e,t){var r=[];return r.push({indexName:e,params:a._getHitsSearchParams(t)}),t.getRefinedDisjunctiveFacets().forEach((function(n){r.push({indexName:e,params:a._getDisjunctiveFacetSearchParams(t,n)})})),t.getRefinedHierarchicalFacets().forEach((function(n){var i=t.getHierarchicalFacetByName(n),s=t.getHierarchicalRefinement(n),c=t._getHierarchicalFacetSeparator(i);if(s.length>0&&s[0].split(c).length>1){var u=s[0].split(c).slice(0,-1).reduce((function(e,t,r){return e.concat({attribute:i.attributes[r],value:0===r?t:[e[e.length-1].value,t].join(c)})}),[]);u.forEach((function(n,s){var c=a._getDisjunctiveFacetSearchParams(t,n.attribute,0===s);function o(e){return i.attributes.some((function(t){return t===e.split(":")[0]}))}var h=(c.facetFilters||[]).reduce((function(e,t){if(Array.isArray(t)){var r=t.filter((function(e){return!o(e)}));r.length>0&&e.push(r)}return"string"!=typeof t||o(t)||e.push(t),e}),[]),f=u[s-1];c.facetFilters=s>0?h.concat(f.attribute+":"+f.value):h.length>0?h:void 0,r.push({indexName:e,params:c})}))}})),r},_getHitsSearchParams:function(e){var t=e.facets.concat(e.disjunctiveFacets).concat(a._getHitsHierarchicalFacetsAttributes(e)),r=a._getFacetFilters(e),s=a._getNumericFilters(e),c=a._getTagFilters(e),u={facets:t.indexOf("*")>-1?["*"]:t,tagFilters:c};return r.length>0&&(u.facetFilters=r),s.length>0&&(u.numericFilters=s),i(n({},e.getQueryParams(),u))},_getDisjunctiveFacetSearchParams:function(e,t,r){var s=a._getFacetFilters(e,t,r),c=a._getNumericFilters(e,t),u=a._getTagFilters(e),o={hitsPerPage:0,page:0,analytics:!1,clickAnalytics:!1};u.length>0&&(o.tagFilters=u);var h=e.getHierarchicalFacetByName(t);return o.facets=h?a._getDisjunctiveHierarchicalFacetAttribute(e,h,r):t,c.length>0&&(o.numericFilters=c),s.length>0&&(o.facetFilters=s),i(n({},e.getQueryParams(),o))},_getNumericFilters:function(e,t){if(e.numericFilters)return e.numericFilters;var r=[];return 
Object.keys(e.numericRefinements).forEach((function(n){var i=e.numericRefinements[n]||{};Object.keys(i).forEach((function(e){var a=i[e]||[];t!==n&&a.forEach((function(t){if(Array.isArray(t)){var i=t.map((function(t){return n+e+t}));r.push(i)}else r.push(n+e+t)}))}))})),r},_getTagFilters:function(e){return e.tagFilters?e.tagFilters:e.tagRefinements.join(",")},_getFacetFilters:function(e,t,r){var n=[],i=e.facetsRefinements||{};Object.keys(i).forEach((function(e){(i[e]||[]).forEach((function(t){n.push(e+":"+t)}))}));var a=e.facetsExcludes||{};Object.keys(a).forEach((function(e){(a[e]||[]).forEach((function(t){n.push(e+":-"+t)}))}));var s=e.disjunctiveFacetsRefinements||{};Object.keys(s).forEach((function(e){var r=s[e]||[];if(e!==t&&r&&0!==r.length){var i=[];r.forEach((function(t){i.push(e+":"+t)})),n.push(i)}}));var c=e.hierarchicalFacetsRefinements||{};return Object.keys(c).forEach((function(i){var a=(c[i]||[])[0];if(void 0!==a){var s,u,o=e.getHierarchicalFacetByName(i),h=e._getHierarchicalFacetSeparator(o),f=e._getHierarchicalRootPath(o);if(t===i){if(-1===a.indexOf(h)||!f&&!0===r||f&&f.split(h).length===a.split(h).length)return;f?(u=f.split(h).length-1,a=f):(u=a.split(h).length-2,a=a.slice(0,a.lastIndexOf(h))),s=o.attributes[u]}else u=a.split(h).length-1,s=o.attributes[u];s&&n.push([s+":"+a])}})),n},_getHitsHierarchicalFacetsAttributes:function(e){return e.hierarchicalFacets.reduce((function(t,r){var n=e.getHierarchicalRefinement(r.name)[0];if(!n)return t.push(r.attributes[0]),t;var i=e._getHierarchicalFacetSeparator(r),a=n.split(i).length,s=r.attributes.slice(0,a+1);return t.concat(s)}),[])},_getDisjunctiveHierarchicalFacetAttribute:function(e,t,r){var n=e._getHierarchicalFacetSeparator(t);if(!0===r){var i=e._getHierarchicalRootPath(t),a=0;return i&&(a=i.split(n).length),[t.attributes[a]]}var s=(e.getHierarchicalRefinement(t.name)[0]||"").split(n).length-1;return t.attributes.slice(0,s+1)},getSearchForFacetQuery:function(e,t,r,s){var 
c=s.isDisjunctiveFacet(e)?s.clearRefinements(e):s,u={facetQuery:t,facetName:e};return"number"==typeof r&&(u.maxFacetHits=r),i(n({},a._getHitsSearchParams(c),u))}};e.exports=a},6801:e=>{"use strict";e.exports=function(e){return null!==e&&/^[a-zA-Z0-9_-]{1,64}$/.test(e)}},5474:e=>{"use strict";e.exports="3.12.0"},290:function(e){e.exports=function(){"use strict";function e(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function t(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function r(r){for(var n=1;n<arguments.length;n++){var i=null!=arguments[n]?arguments[n]:{};n%2?t(Object(i),!0).forEach((function(t){e(r,t,i[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(r,Object.getOwnPropertyDescriptors(i)):t(Object(i)).forEach((function(e){Object.defineProperty(r,e,Object.getOwnPropertyDescriptor(i,e))}))}return r}function n(e,t){if(null==e)return{};var r,n,i=function(e,t){if(null==e)return{};var r,n,i={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(i[r]=e[r]);return i}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(i[r]=e[r])}return i}function i(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){if(Symbol.iterator in Object(e)||"[object Arguments]"===Object.prototype.toString.call(e)){var r=[],n=!0,i=!1,a=void 0;try{for(var s,c=e[Symbol.iterator]();!(n=(s=c.next()).done)&&(r.push(s.value),!t||r.length!==t);n=!0);}catch(e){i=!0,a=e}finally{try{n||null==c.return||c.return()}finally{if(i)throw a}}return r}}(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance")}()}function a(e){return function(e){if(Array.isArray(e)){for(var t=0,r=new 
Array(e.length);t<e.length;t++)r[t]=e[t];return r}}(e)||function(e){if(Symbol.iterator in Object(e)||"[object Arguments]"===Object.prototype.toString.call(e))return Array.from(e)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance")}()}function s(e){var t,r="algoliasearch-client-js-".concat(e.key),n=function(){return void 0===t&&(t=e.localStorage||window.localStorage),t},a=function(){return JSON.parse(n().getItem(r)||"{}")};return{get:function(e,t){var r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{miss:function(){return Promise.resolve()}};return Promise.resolve().then((function(){var r=JSON.stringify(e),n=a()[r];return Promise.all([n||t(),void 0!==n])})).then((function(e){var t=i(e,2),n=t[0],a=t[1];return Promise.all([n,a||r.miss(n)])})).then((function(e){return i(e,1)[0]}))},set:function(e,t){return Promise.resolve().then((function(){var i=a();return i[JSON.stringify(e)]=t,n().setItem(r,JSON.stringify(i)),t}))},delete:function(e){return Promise.resolve().then((function(){var t=a();delete t[JSON.stringify(e)],n().setItem(r,JSON.stringify(t))}))},clear:function(){return Promise.resolve().then((function(){n().removeItem(r)}))}}}function c(e){var t=a(e.caches),r=t.shift();return void 0===r?{get:function(e,t){var r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{miss:function(){return Promise.resolve()}};return t().then((function(e){return Promise.all([e,r.miss(e)])})).then((function(e){return i(e,1)[0]}))},set:function(e,t){return Promise.resolve(t)},delete:function(e){return Promise.resolve()},clear:function(){return Promise.resolve()}}:{get:function(e,n){var i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{miss:function(){return Promise.resolve()}};return r.get(e,n,i).catch((function(){return c({caches:t}).get(e,n,i)}))},set:function(e,n){return r.set(e,n).catch((function(){return c({caches:t}).set(e,n)}))},delete:function(e){return r.delete(e).catch((function(){return 
c({caches:t}).delete(e)}))},clear:function(){return r.clear().catch((function(){return c({caches:t}).clear()}))}}}function u(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{serializable:!0},t={};return{get:function(r,n){var i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{miss:function(){return Promise.resolve()}},a=JSON.stringify(r);if(a in t)return Promise.resolve(e.serializable?JSON.parse(t[a]):t[a]);var s=n(),c=i&&i.miss||function(){return Promise.resolve()};return s.then((function(e){return c(e)})).then((function(){return s}))},set:function(r,n){return t[JSON.stringify(r)]=e.serializable?JSON.stringify(n):n,Promise.resolve(n)},delete:function(e){return delete t[JSON.stringify(e)],Promise.resolve()},clear:function(){return t={},Promise.resolve()}}}function o(e){for(var t=e.length-1;t>0;t--){var r=Math.floor(Math.random()*(t+1)),n=e[t];e[t]=e[r],e[r]=n}return e}function h(e,t){return t?(Object.keys(t).forEach((function(r){e[r]=t[r](e)})),e):e}function f(e){for(var t=arguments.length,r=new Array(t>1?t-1:0),n=1;n<t;n++)r[n-1]=arguments[n];var i=0;return e.replace(/%s/g,(function(){return encodeURIComponent(r[i++])}))}var l={WithinQueryParameters:0,WithinHeaders:1};function m(e,t){var r=e||{},n=r.data||{};return Object.keys(r).forEach((function(e){-1===["timeout","headers","queryParameters","data","cacheable"].indexOf(e)&&(n[e]=r[e])})),{data:Object.entries(n).length>0?n:void 0,timeout:r.timeout||t,headers:r.headers||{},queryParameters:r.queryParameters||{},cacheable:r.cacheable}}var d={Read:1,Write:2,Any:3},p=1,v=2,g=3;function y(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:p;return r(r({},e),{},{status:t,lastUpdate:Date.now()})}function R(e){return"string"==typeof e?{protocol:"https",url:e,accept:d.Any}:{protocol:e.protocol||"https",url:e.url,accept:e.accept||d.Any}}var F="GET",b="POST";function P(e,t){return Promise.all(t.map((function(t){return e.get(t,(function(){return 
Promise.resolve(y(t))}))}))).then((function(e){var r=e.filter((function(e){return function(e){return e.status===p||Date.now()-e.lastUpdate>12e4}(e)})),n=e.filter((function(e){return function(e){return e.status===g&&Date.now()-e.lastUpdate<=12e4}(e)})),i=[].concat(a(r),a(n));return{getTimeout:function(e,t){return(0===n.length&&0===e?1:n.length+3+e)*t},statelessHosts:i.length>0?i.map((function(e){return R(e)})):t}}))}function j(e,t,n,i){var s=[],c=function(e,t){if(e.method!==F&&(void 0!==e.data||void 0!==t.data)){var n=Array.isArray(e.data)?e.data:r(r({},e.data),t.data);return JSON.stringify(n)}}(n,i),u=function(e,t){var n=r(r({},e.headers),t.headers),i={};return Object.keys(n).forEach((function(e){var t=n[e];i[e.toLowerCase()]=t})),i}(e,i),o=n.method,h=n.method!==F?{}:r(r({},n.data),i.data),f=r(r(r({"x-algolia-agent":e.userAgent.value},e.queryParameters),h),i.queryParameters),l=0,m=function t(r,a){var h=r.pop();if(void 0===h)throw{name:"RetryError",message:"Unreachable hosts - your application id may be incorrect. 
If the error persists, contact support@algolia.com.",transporterStackTrace:w(s)};var m={data:c,headers:u,method:o,url:E(h,n.path,f),connectTimeout:a(l,e.timeouts.connect),responseTimeout:a(l,i.timeout)},d=function(e){var t={request:m,response:e,host:h,triesLeft:r.length};return s.push(t),t},p={onSuccess:function(e){return function(e){try{return JSON.parse(e.content)}catch(t){throw function(e,t){return{name:"DeserializationError",message:e,response:t}}(t.message,e)}}(e)},onRetry:function(n){var i=d(n);return n.isTimedOut&&l++,Promise.all([e.logger.info("Retryable failure",O(i)),e.hostsCache.set(h,y(h,n.isTimedOut?g:v))]).then((function(){return t(r,a)}))},onFail:function(e){throw d(e),function(e,t){var r=e.content,n=e.status,i=r;try{i=JSON.parse(r).message}catch(e){}return function(e,t,r){return{name:"ApiError",message:e,status:t,transporterStackTrace:r}}(i,n,t)}(e,w(s))}};return e.requester.send(m).then((function(e){return function(e,t){return function(e){var t=e.status;return e.isTimedOut||function(e){var t=e.isTimedOut,r=e.status;return!t&&0==~~r}(e)||2!=~~(t/100)&&4!=~~(t/100)}(e)?t.onRetry(e):2==~~(e.status/100)?t.onSuccess(e):t.onFail(e)}(e,p)}))};return P(e.hostsCache,t).then((function(e){return m(a(e.statelessHosts).reverse(),e.getTimeout)}))}function _(e){var t={value:"Algolia for JavaScript (".concat(e,")"),add:function(e){var r="; ".concat(e.segment).concat(void 0!==e.version?" 
(".concat(e.version,")"):"");return-1===t.value.indexOf(r)&&(t.value="".concat(t.value).concat(r)),t}};return t}function E(e,t,r){var n=x(r),i="".concat(e.protocol,"://").concat(e.url,"/").concat("/"===t.charAt(0)?t.substr(1):t);return n.length&&(i+="?".concat(n)),i}function x(e){return Object.keys(e).map((function(t){return f("%s=%s",t,(r=e[t],"[object Object]"===Object.prototype.toString.call(r)||"[object Array]"===Object.prototype.toString.call(r)?JSON.stringify(e[t]):e[t]));var r})).join("&")}function w(e){return e.map((function(e){return O(e)}))}function O(e){var t=e.request.headers["x-algolia-api-key"]?{"x-algolia-api-key":"*****"}:{};return r(r({},e),{},{request:r(r({},e.request),{},{headers:r(r({},e.request.headers),t)})})}var N=function(e){var t=e.appId,n=function(e,t,r){var n={"x-algolia-api-key":r,"x-algolia-application-id":t};return{headers:function(){return e===l.WithinHeaders?n:{}},queryParameters:function(){return e===l.WithinQueryParameters?n:{}}}}(void 0!==e.authMode?e.authMode:l.WithinHeaders,t,e.apiKey),a=function(e){var t=e.hostsCache,r=e.logger,n=e.requester,a=e.requestsCache,s=e.responsesCache,c=e.timeouts,u=e.userAgent,o=e.hosts,h=e.queryParameters,f={hostsCache:t,logger:r,requester:n,requestsCache:a,responsesCache:s,timeouts:c,userAgent:u,headers:e.headers,queryParameters:h,hosts:o.map((function(e){return R(e)})),read:function(e,t){var r=m(t,f.timeouts.read),n=function(){return j(f,f.hosts.filter((function(e){return 0!=(e.accept&d.Read)})),e,r)};if(!0!==(void 0!==r.cacheable?r.cacheable:e.cacheable))return n();var a={request:e,mappedRequestOptions:r,transporter:{queryParameters:f.queryParameters,headers:f.headers}};return f.responsesCache.get(a,(function(){return f.requestsCache.get(a,(function(){return f.requestsCache.set(a,n()).then((function(e){return Promise.all([f.requestsCache.delete(a),e])}),(function(e){return Promise.all([f.requestsCache.delete(a),Promise.reject(e)])})).then((function(e){var t=i(e,2);return 
t[0],t[1]}))}))}),{miss:function(e){return f.responsesCache.set(a,e)}})},write:function(e,t){return j(f,f.hosts.filter((function(e){return 0!=(e.accept&d.Write)})),e,m(t,f.timeouts.write))}};return f}(r(r({hosts:[{url:"".concat(t,"-dsn.algolia.net"),accept:d.Read},{url:"".concat(t,".algolia.net"),accept:d.Write}].concat(o([{url:"".concat(t,"-1.algolianet.com")},{url:"".concat(t,"-2.algolianet.com")},{url:"".concat(t,"-3.algolianet.com")}]))},e),{},{headers:r(r(r({},n.headers()),{"content-type":"application/x-www-form-urlencoded"}),e.headers),queryParameters:r(r({},n.queryParameters()),e.queryParameters)}));return h({transporter:a,appId:t,addAlgoliaAgent:function(e,t){a.userAgent.add({segment:e,version:t})},clearCache:function(){return Promise.all([a.requestsCache.clear(),a.responsesCache.clear()]).then((function(){}))}},e.methods)},A=function(e){return function(t,r){return t.method===F?e.transporter.read(t,r):e.transporter.write(t,r)}},H=function(e){return function(t){var r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return h({transporter:e.transporter,appId:e.appId,indexName:t},r.methods)}},S=function(e){return function(t,n){var i=t.map((function(e){return r(r({},e),{},{params:x(e.params||{})})}));return e.transporter.read({method:b,path:"1/indexes/*/queries",data:{requests:i},cacheable:!0},n)}},T=function(e){return function(t,i){return Promise.all(t.map((function(t){var a=t.params,s=a.facetName,c=a.facetQuery,u=n(a,["facetName","facetQuery"]);return H(e)(t.indexName,{methods:{searchForFacetValues:k}}).searchForFacetValues(s,c,r(r({},i),u))})))}},Q=function(e){return function(t,r,n){return e.transporter.read({method:b,path:f("1/answers/%s/prediction",e.indexName),data:{query:t,queryLanguages:r},cacheable:!0},n)}},C=function(e){return function(t,r){return e.transporter.read({method:b,path:f("1/indexes/%s/query",e.indexName),data:{query:t},cacheable:!0},r)}},k=function(e){return function(t,r,n){return 
e.transporter.read({method:b,path:f("1/indexes/%s/facets/%s/query",e.indexName,t),data:{facetQuery:r},cacheable:!0},n)}},I=1,D=2,q=3;function V(e,t,n){var i,a={appId:e,apiKey:t,timeouts:{connect:1,read:2,write:30},requester:{send:function(e){return new Promise((function(t){var r=new XMLHttpRequest;r.open(e.method,e.url,!0),Object.keys(e.headers).forEach((function(t){return r.setRequestHeader(t,e.headers[t])}));var n,i=function(e,n){return setTimeout((function(){r.abort(),t({status:0,content:n,isTimedOut:!0})}),1e3*e)},a=i(e.connectTimeout,"Connection timeout");r.onreadystatechange=function(){r.readyState>r.OPENED&&void 0===n&&(clearTimeout(a),n=i(e.responseTimeout,"Socket timeout"))},r.onerror=function(){0===r.status&&(clearTimeout(a),clearTimeout(n),t({content:r.responseText||"Network request failed",status:r.status,isTimedOut:!1}))},r.onload=function(){clearTimeout(a),clearTimeout(n),t({content:r.responseText,status:r.status,isTimedOut:!1})},r.send(e.data)}))}},logger:(i=q,{debug:function(e,t){return I>=i&&console.debug(e,t),Promise.resolve()},info:function(e,t){return D>=i&&console.info(e,t),Promise.resolve()},error:function(e,t){return console.error(e,t),Promise.resolve()}}),responsesCache:u(),requestsCache:u({serializable:!1}),hostsCache:c({caches:[s({key:"".concat("4.16.0","-").concat(e)}),u()]}),userAgent:_("4.16.0").add({segment:"Browser",version:"lite"}),authMode:l.WithinQueryParameters};return N(r(r(r({},a),n),{},{methods:{search:S,searchForFacetValues:T,multipleQueries:S,multipleSearchForFacetValues:T,customRequest:A,initIndex:function(e){return function(t){return H(e)(t,{methods:{search:C,searchForFacetValues:k,findAnswers:Q}})}}}}))}return V.version="4.16.0",V}()},6675:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>A});var n=r(7294),i=r(6010),a=r(4766),s=r.n(a),c=r(290),u=r.n(c),o=r(412),h=r(5742),f=r(9960),l=r(143),m=r(2263);const d=["zero","one","two","few","many","other"];function p(e){return d.filter((t=>e.includes(t)))}const 
v={locale:"en",pluralForms:p(["one","other"]),select:e=>1===e?"one":"other"};function g(){const{i18n:{currentLocale:e}}=(0,m.Z)();return(0,n.useMemo)((()=>{try{return function(e){const t=new Intl.PluralRules(e);return{locale:e,pluralForms:p(t.resolvedOptions().pluralCategories),select:e=>t.select(e)}}(e)}catch(t){return console.error(`Failed to use Intl.PluralRules for locale "${e}".\nDocusaurus will fallback to the default (English) implementation.\nError: ${t.message}\n`),v}}),[e])}function y(){const e=g();return{selectMessage:(t,r)=>function(e,t,r){const n=e.split("|");if(1===n.length)return n[0];n.length>r.pluralForms.length&&console.error(`For locale=${r.locale}, a maximum of ${r.pluralForms.length} plural forms are expected (${r.pluralForms.join(",")}), but the message contains ${n.length}: ${e}`);const i=r.select(t),a=r.pluralForms.indexOf(i);return n[Math.min(a,n.length-1)]}(r,t,e)}}var R=r(6177),F=r(902),b=r(833),P=r(2128),j=r(5999),_=r(6278),E=r(239),x=r(7452);const w={searchQueryInput:"searchQueryInput_u2C7",searchVersionInput:"searchVersionInput_m0Ui",searchResultsColumn:"searchResultsColumn_JPFH",algoliaLogo:"algoliaLogo_rT1R",algoliaLogoPathFill:"algoliaLogoPathFill_WdUC",searchResultItem:"searchResultItem_Tv2o",searchResultItemHeading:"searchResultItemHeading_KbCB",searchResultItemPath:"searchResultItemPath_lhe1",searchResultItemSummary:"searchResultItemSummary_AEaO",searchQueryColumn:"searchQueryColumn_RTkw",searchVersionColumn:"searchVersionColumn_ypXd",searchLogoColumn:"searchLogoColumn_rJIA",loadingSpinner:"loadingSpinner_XVxU","loading-spin":"loading-spin_vzvp",loader:"loader_vvXV"};function O(e){let{docsSearchVersionsHelpers:t}=e;const r=Object.entries(t.allDocsData).filter((e=>{let[,t]=e;return t.versions.length>1}));return n.createElement("div",{className:(0,i.Z)("col","col--3","padding-left--none",w.searchVersionColumn)},r.map((e=>{let[i,a]=e;const s=r.length>1?`${i}: `:"";return 
n.createElement("select",{key:i,onChange:e=>t.setSearchVersion(i,e.target.value),defaultValue:t.searchVersions[i],className:w.searchVersionInput},a.versions.map(((e,t)=>n.createElement("option",{key:t,label:`${s}${e.label}`,value:e.name}))))})))}function N(){const{i18n:{currentLocale:e}}=(0,m.Z)(),{algolia:{appId:t,apiKey:r,indexName:a}}=(0,_.L)(),c=(0,E.l)(),d=function(){const{selectMessage:e}=y();return t=>e(t,(0,j.I)({id:"theme.SearchPage.documentsFound.plurals",description:'Pluralized label for "{count} documents found". Use as much plural forms (separated by "|") as your language support (see https://www.unicode.org/cldr/cldr-aux/charts/34/supplemental/language_plural_rules.html)',message:"One document found|{count} documents found"},{count:t}))}(),p=function(){const e=(0,l._r)(),[t,r]=(0,n.useState)((()=>Object.entries(e).reduce(((e,t)=>{let[r,n]=t;return{...e,[r]:n.versions[0].name}}),{}))),i=Object.values(e).some((e=>e.versions.length>1));return{allDocsData:e,versioningEnabled:i,searchVersions:t,setSearchVersion:(e,t)=>r((r=>({...r,[e]:t})))}}(),[v,g]=(0,R.K)(),b={items:[],query:null,totalResults:null,totalPages:null,lastPage:null,hasMore:null,loading:null},[N,A]=(0,n.useReducer)(((e,t)=>{switch(t.type){case"reset":return b;case"loading":return{...e,loading:!0};case"update":return v!==t.value.query?e:{...t.value,items:0===t.value.lastPage?t.value.items:e.items.concat(t.value.items)};case"advance":{const t=e.totalPages>e.lastPage+1;return{...e,lastPage:t?e.lastPage+1:e.lastPage,hasMore:t}}default:return e}}),b),H=u()(t,r),S=s()(H,a,{hitsPerPage:15,advancedSyntax:!0,disjunctiveFacets:["language","docusaurus_tag"]});S.on("result",(e=>{let{results:{query:t,hits:r,page:n,nbHits:i,nbPages:a}}=e;if(""===t||!Array.isArray(r))return void A({type:"reset"});const s=e=>e.replace(/algolia-docsearch-suggestion--highlight/g,"search-result-match"),u=r.map((e=>{let{url:t,_highlightResult:{hierarchy:r},_snippetResult:n={}}=e;const 
i=Object.keys(r).map((e=>s(r[e].value)));return{title:i.pop(),url:c(t),summary:n.content?`${s(n.content.value)}...`:"",breadcrumbs:i}}));A({type:"update",value:{items:u,query:t,totalResults:i,totalPages:a,lastPage:n,hasMore:a>n+1,loading:!1}})}));const[T,Q]=(0,n.useState)(null),C=(0,n.useRef)(0),k=(0,n.useRef)(o.Z.canUseIntersectionObserver&&new IntersectionObserver((e=>{const{isIntersecting:t,boundingClientRect:{y:r}}=e[0];t&&C.current>r&&A({type:"advance"}),C.current=r}),{threshold:1})),I=()=>v?(0,j.I)({id:"theme.SearchPage.existingResultsTitle",message:'Search results for "{query}"',description:"The search page title for non-empty query"},{query:v}):(0,j.I)({id:"theme.SearchPage.emptyResultsTitle",message:"Search the documentation",description:"The search page title for empty query"}),D=(0,F.zX)((function(t){void 0===t&&(t=0),S.addDisjunctiveFacetRefinement("docusaurus_tag","default"),S.addDisjunctiveFacetRefinement("language",e),Object.entries(p.searchVersions).forEach((e=>{let[t,r]=e;S.addDisjunctiveFacetRefinement("docusaurus_tag",`docs-${t}-${r}`)})),S.setQuery(v).setPage(t).search()}));return(0,n.useEffect)((()=>{if(!T)return;const e=k.current;return e?(e.observe(T),()=>e.unobserve(T)):()=>!0}),[T]),(0,n.useEffect)((()=>{A({type:"reset"}),v&&(A({type:"loading"}),setTimeout((()=>{D()}),300))}),[v,p.searchVersions,D]),(0,n.useEffect)((()=>{N.lastPage&&0!==N.lastPage&&D(N.lastPage)}),[D,N.lastPage]),n.createElement(x.Z,null,n.createElement(h.Z,null,n.createElement("title",null,(0,P.p)(I())),n.createElement("meta",{property:"robots",content:"noindex, follow"})),n.createElement("div",{className:"container 
margin-vert--lg"},n.createElement("h1",null,I()),n.createElement("form",{className:"row",onSubmit:e=>e.preventDefault()},n.createElement("div",{className:(0,i.Z)("col",w.searchQueryColumn,{"col--9":p.versioningEnabled,"col--12":!p.versioningEnabled})},n.createElement("input",{type:"search",name:"q",className:w.searchQueryInput,placeholder:(0,j.I)({id:"theme.SearchPage.inputPlaceholder",message:"Type your search here",description:"The placeholder for search page input"}),"aria-label":(0,j.I)({id:"theme.SearchPage.inputLabel",message:"Search",description:"The ARIA label for search page input"}),onChange:e=>g(e.target.value),value:v,autoComplete:"off",autoFocus:!0})),p.versioningEnabled&&n.createElement(O,{docsSearchVersionsHelpers:p})),n.createElement("div",{className:"row"},n.createElement("div",{className:(0,i.Z)("col","col--8",w.searchResultsColumn)},!!N.totalResults&&d(N.totalResults)),n.createElement("div",{className:(0,i.Z)("col","col--4","text--right",w.searchLogoColumn)},n.createElement("a",{target:"_blank",rel:"noopener noreferrer",href:"https://www.algolia.com/","aria-label":(0,j.I)({id:"theme.SearchPage.algoliaLabel",message:"Search by Algolia",description:"The ARIA label for Algolia mention"})},n.createElement("svg",{viewBox:"0 0 168 24",className:w.algoliaLogo},n.createElement("g",{fill:"none"},n.createElement("path",{className:w.algoliaLogoPathFill,d:"M120.925 18.804c-4.386.02-4.386-3.54-4.386-4.106l-.007-13.336 2.675-.424v13.254c0 .322 0 2.358 1.718 2.364v2.248zm-10.846-2.18c.821 0 1.43-.047 1.855-.129v-2.719a6.334 6.334 0 0 0-1.574-.199 5.7 5.7 0 0 0-.897.069 2.699 2.699 0 0 0-.814.24c-.24.116-.439.28-.582.491-.15.212-.219.335-.219.656 0 .628.219.991.616 1.23s.938.362 1.615.362zm-.233-9.7c.883 0 1.629.109 2.231.328.602.218 1.088.525 1.444.915.363.396.609.922.76 1.483.157.56.232 1.175.232 1.85v6.874a32.5 32.5 0 0 1-1.868.314c-.834.123-1.772.185-2.813.185-.69 0-1.327-.069-1.895-.198a4.001 4.001 0 0 1-1.471-.636 3.085 3.085 0 0 
1-.951-1.134c-.226-.465-.343-1.12-.343-1.803 0-.656.13-1.073.384-1.525a3.24 3.24 0 0 1 1.047-1.106c.445-.287.95-.492 1.532-.615a8.8 8.8 0 0 1 1.82-.185 8.404 8.404 0 0 1 1.972.24v-.438c0-.307-.035-.6-.11-.874a1.88 1.88 0 0 0-.384-.73 1.784 1.784 0 0 0-.724-.493 3.164 3.164 0 0 0-1.143-.205c-.616 0-1.177.075-1.69.164a7.735 7.735 0 0 0-1.26.307l-.321-2.192c.335-.117.834-.233 1.478-.349a10.98 10.98 0 0 1 2.073-.178zm52.842 9.626c.822 0 1.43-.048 1.854-.13V13.7a6.347 6.347 0 0 0-1.574-.199c-.294 0-.595.021-.896.069a2.7 2.7 0 0 0-.814.24 1.46 1.46 0 0 0-.582.491c-.15.212-.218.335-.218.656 0 .628.218.991.615 1.23.404.245.938.362 1.615.362zm-.226-9.694c.883 0 1.629.108 2.231.327.602.219 1.088.526 1.444.915.355.39.609.923.759 1.483a6.8 6.8 0 0 1 .233 1.852v6.873c-.41.088-1.034.19-1.868.314-.834.123-1.772.184-2.813.184-.69 0-1.327-.068-1.895-.198a4.001 4.001 0 0 1-1.471-.635 3.085 3.085 0 0 1-.951-1.134c-.226-.465-.343-1.12-.343-1.804 0-.656.13-1.073.384-1.524.26-.45.608-.82 1.047-1.107.445-.286.95-.491 1.532-.614a8.803 8.803 0 0 1 2.751-.13c.329.034.671.096 1.04.185v-.437a3.3 3.3 0 0 0-.109-.875 1.873 1.873 0 0 0-.384-.731 1.784 1.784 0 0 0-.724-.492 3.165 3.165 0 0 0-1.143-.205c-.616 0-1.177.075-1.69.164a7.75 7.75 0 0 0-1.26.307l-.321-2.193c.335-.116.834-.232 1.478-.348a11.633 11.633 0 0 1 2.073-.177zm-8.034-1.271a1.626 1.626 0 0 1-1.628-1.62c0-.895.725-1.62 1.628-1.62.904 0 1.63.725 1.63 1.62 0 .895-.733 1.62-1.63 1.62zm1.348 13.22h-2.689V7.27l2.69-.423v11.956zm-4.714 0c-4.386.02-4.386-3.54-4.386-4.107l-.008-13.336 2.676-.424v13.254c0 .322 0 2.358 1.718 2.364v2.248zm-8.698-5.903c0-1.156-.253-2.119-.746-2.788-.493-.677-1.183-1.01-2.067-1.01-.882 0-1.574.333-2.065 1.01-.493.676-.733 1.632-.733 2.788 0 1.168.246 1.953.74 2.63.492.683 1.183 1.018 2.066 1.018.882 0 1.574-.342 2.067-1.019.492-.683.738-1.46.738-2.63zm2.737-.007c0 .902-.13 1.584-.397 2.33a5.52 5.52 0 0 1-1.128 1.906 4.986 4.986 0 0 1-1.752 1.223c-.685.286-1.739.45-2.265.45-.528-.006-1.574-.157-2.252-.45a5.096 
5.096 0 0 1-1.744-1.223c-.487-.527-.863-1.162-1.137-1.906a6.345 6.345 0 0 1-.41-2.33c0-.902.123-1.77.397-2.508a5.554 5.554 0 0 1 1.15-1.892 5.133 5.133 0 0 1 1.75-1.216c.679-.287 1.425-.423 2.232-.423.808 0 1.553.142 2.237.423a4.88 4.88 0 0 1 1.753 1.216 5.644 5.644 0 0 1 1.135 1.892c.287.738.431 1.606.431 2.508zm-20.138 0c0 1.12.246 2.363.738 2.882.493.52 1.13.78 1.91.78.424 0 .828-.062 1.204-.178.377-.116.677-.253.917-.417V9.33a10.476 10.476 0 0 0-1.766-.226c-.971-.028-1.71.37-2.23 1.004-.513.636-.773 1.75-.773 2.788zm7.438 5.274c0 1.824-.466 3.156-1.404 4.004-.936.846-2.367 1.27-4.296 1.27-.705 0-2.17-.137-3.34-.396l.431-2.118c.98.205 2.272.26 2.95.26 1.074 0 1.84-.219 2.299-.656.459-.437.684-1.086.684-1.948v-.437a8.07 8.07 0 0 1-1.047.397c-.43.13-.93.198-1.492.198-.739 0-1.41-.116-2.018-.349a4.206 4.206 0 0 1-1.567-1.025c-.431-.45-.774-1.017-1.013-1.694-.24-.677-.363-1.885-.363-2.773 0-.834.13-1.88.384-2.577.26-.696.629-1.298 1.129-1.796.493-.498 1.095-.881 1.8-1.162a6.605 6.605 0 0 1 2.428-.457c.87 0 1.67.109 2.45.24.78.129 1.444.265 1.985.415V18.17zM6.972 6.677v1.627c-.712-.446-1.52-.67-2.425-.67-.585 0-1.045.13-1.38.391a1.24 1.24 0 0 0-.502 1.03c0 .425.164.765.494 1.02.33.256.835.532 1.516.83.447.192.795.356 1.045.495.25.138.537.332.862.582.324.25.563.548.718.894.154.345.23.741.23 1.188 0 .947-.334 1.691-1.004 2.234-.67.542-1.537.814-2.601.814-1.18 0-2.16-.229-2.936-.686v-1.708c.84.628 1.814.942 2.92.942.585 0 1.048-.136 1.388-.407.34-.271.51-.646.51-1.125 0-.287-.1-.55-.302-.79-.203-.24-.42-.42-.655-.542-.234-.123-.585-.29-1.053-.503a61.27 61.27 0 0 1-.582-.271 13.67 13.67 0 0 1-.55-.287 4.275 4.275 0 0 1-.567-.351 6.92 6.92 0 0 1-.455-.4c-.18-.17-.31-.34-.39-.51-.08-.17-.155-.37-.224-.598a2.553 2.553 0 0 1-.104-.742c0-.915.333-1.638.998-2.17.664-.532 1.523-.798 2.576-.798.968 0 1.793.17 2.473.51zm7.468 5.696v-.287c-.022-.607-.187-1.088-.495-1.444-.309-.357-.75-.535-1.324-.535-.532 0-.99.194-1.373.583-.382.388-.622.949-.717 1.683h3.909zm1.005 
2.792v1.404c-.596.34-1.383.51-2.362.51-1.255 0-2.255-.377-3-1.132-.744-.755-1.116-1.744-1.116-2.968 0-1.297.34-2.316 1.021-3.055.68-.74 1.548-1.11 2.6-1.11 1.033 0 1.852.323 2.458.966.606.644.91 1.572.91 2.784 0 .33-.033.676-.096 1.038h-5.314c.107.702.405 1.239.894 1.611.49.372 1.106.558 1.85.558.862 0 1.58-.202 2.155-.606zm6.605-1.77h-1.212c-.596 0-1.045.116-1.349.35-.303.234-.454.532-.454.894 0 .372.117.664.35.877.235.213.575.32 1.022.32.51 0 .912-.142 1.204-.424.293-.281.44-.651.44-1.108v-.91zm-4.068-2.554V9.325c.627-.361 1.457-.542 2.489-.542 2.116 0 3.175 1.026 3.175 3.08V17h-1.548v-.957c-.415.68-1.143 1.02-2.186 1.02-.766 0-1.38-.22-1.843-.661-.462-.442-.694-1.003-.694-1.684 0-.776.293-1.38.878-1.81.585-.431 1.404-.647 2.457-.647h1.34V11.8c0-.554-.133-.971-.399-1.253-.266-.282-.707-.423-1.324-.423a4.07 4.07 0 0 0-2.345.718zm9.333-1.93v1.42c.394-1 1.101-1.5 2.123-1.5.148 0 .313.016.494.048v1.531a1.885 1.885 0 0 0-.75-.143c-.542 0-.989.24-1.34.718-.351.479-.527 1.048-.527 1.707V17h-1.563V8.91h1.563zm5.01 4.084c.022.82.272 1.492.75 2.019.479.526 1.15.79 2.01.79.639 0 1.235-.176 1.788-.527v1.404c-.521.319-1.186.479-1.995.479-1.265 0-2.276-.4-3.031-1.197-.755-.798-1.133-1.792-1.133-2.984 0-1.16.38-2.151 1.14-2.975.761-.825 1.79-1.237 3.088-1.237.702 0 1.346.149 1.93.447v1.436a3.242 3.242 0 0 0-1.77-.495c-.84 0-1.513.266-2.019.798-.505.532-.758 1.213-.758 2.042zM40.24 5.72v4.579c.458-1 1.293-1.5 2.505-1.5.787 0 1.42.245 1.899.734.479.49.718 1.17.718 2.042V17h-1.564v-5.106c0-.553-.14-.98-.422-1.284-.282-.303-.652-.455-1.11-.455-.531 0-1.002.202-1.411.606-.41.405-.615 1.022-.615 1.851V17h-1.563V5.72h1.563zm14.966 10.02c.596 0 1.096-.253 1.5-.758.404-.506.606-1.157.606-1.955 0-.915-.202-1.62-.606-2.114-.404-.495-.92-.742-1.548-.742-.553 0-1.05.224-1.491.67-.442.447-.662 1.133-.662 2.058 0 .958.212 1.67.638 2.138.425.469.946.703 1.563.703zM53.004 5.72v4.42c.574-.894 1.388-1.341 2.44-1.341 1.022 0 1.857.383 2.506 1.149.649.766.973 1.781.973 3.047 0 1.138-.309 2.109-.925 
2.912-.617.803-1.463 1.205-2.537 1.205-1.075 0-1.894-.447-2.457-1.34V17h-1.58V5.72h1.58zm9.908 11.104l-3.223-7.913h1.739l1.005 2.632 1.26 3.415c.096-.32.48-1.458 1.15-3.415l.909-2.632h1.66l-2.92 7.866c-.777 2.074-1.963 3.11-3.559 3.11a2.92 2.92 0 0 1-.734-.079v-1.34c.17.042.351.064.543.064 1.032 0 1.755-.57 2.17-1.708z"}),n.createElement("path",{fill:"#5468FF",d:"M78.988.938h16.594a2.968 2.968 0 0 1 2.966 2.966V20.5a2.967 2.967 0 0 1-2.966 2.964H78.988a2.967 2.967 0 0 1-2.966-2.964V3.897A2.961 2.961 0 0 1 78.988.938z"}),n.createElement("path",{fill:"white",d:"M89.632 5.967v-.772a.978.978 0 0 0-.978-.977h-2.28a.978.978 0 0 0-.978.977v.793c0 .088.082.15.171.13a7.127 7.127 0 0 1 1.984-.28c.65 0 1.295.088 1.917.259.082.02.164-.04.164-.13m-6.248 1.01l-.39-.389a.977.977 0 0 0-1.382 0l-.465.465a.973.973 0 0 0 0 1.38l.383.383c.062.061.15.047.205-.014.226-.307.472-.601.746-.874.281-.28.568-.526.883-.751.068-.042.075-.137.02-.2m4.16 2.453v3.341c0 .096.104.165.192.117l2.97-1.537c.068-.034.089-.117.055-.184a3.695 3.695 0 0 0-3.08-1.866c-.068 0-.136.054-.136.13m0 8.048a4.489 4.489 0 0 1-4.49-4.482 4.488 4.488 0 0 1 4.49-4.482 4.488 4.488 0 0 1 4.489 4.482 4.484 4.484 0 0 1-4.49 4.482m0-10.85a6.363 6.363 0 1 0 0 12.729 6.37 6.37 0 0 0 6.372-6.368 6.358 6.358 0 0 0-6.371-6.36"})))))),N.items.length>0?n.createElement("main",null,N.items.map(((e,t)=>{let{title:r,url:a,summary:s,breadcrumbs:c}=e;return 
n.createElement("article",{key:t,className:w.searchResultItem},n.createElement("h2",{className:w.searchResultItemHeading},n.createElement(f.Z,{to:a,dangerouslySetInnerHTML:{__html:r}})),c.length>0&&n.createElement("nav",{"aria-label":"breadcrumbs"},n.createElement("ul",{className:(0,i.Z)("breadcrumbs",w.searchResultItemPath)},c.map(((e,t)=>n.createElement("li",{key:t,className:"breadcrumbs__item",dangerouslySetInnerHTML:{__html:e}}))))),s&&n.createElement("p",{className:w.searchResultItemSummary,dangerouslySetInnerHTML:{__html:s}}))}))):[v&&!N.loading&&n.createElement("p",{key:"no-results"},n.createElement(j.Z,{id:"theme.SearchPage.noResultsText",description:"The paragraph for empty search result"},"No results were found")),!!N.loading&&n.createElement("div",{key:"spinner",className:w.loadingSpinner})],N.hasMore&&n.createElement("div",{className:w.loader,ref:Q},n.createElement(j.Z,{id:"theme.SearchPage.fetchingNewResults",description:"The paragraph for fetching new search results"},"Fetching new results..."))))}function A(){return n.createElement(b.FG,{className:"search-page-wrapper"},n.createElement(N,null))}}}]); \ No newline at end of file diff --git a/assets/js/1a4e3797.a52196f1.js.LICENSE.txt b/assets/js/1a4e3797.a52196f1.js.LICENSE.txt new file mode 100644 index 0000000..42d81b2 --- /dev/null +++ b/assets/js/1a4e3797.a52196f1.js.LICENSE.txt @@ -0,0 +1 @@ +/*! algoliasearch-lite.umd.js | 4.16.0 | © Algolia, inc. 
| https://github.com/algolia/algoliasearch-client-javascript */ diff --git a/assets/js/1be78505.228b1716.js b/assets/js/1be78505.228b1716.js new file mode 100644 index 0000000..a163fcd --- /dev/null +++ b/assets/js/1be78505.228b1716.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9514,4972],{9963:(e,t,n)=>{n.r(t),n.d(t,{default:()=>fe});var a=n(7294),l=n(6010),o=n(833),r=n(5281),c=n(3320),i=n(2802),s=n(4477),d=n(1116),m=n(7452),u=n(5999),b=n(2466),p=n(5936);const h={backToTopButton:"backToTopButton_sjWU",backToTopButtonShow:"backToTopButtonShow_xfvO"};function E(){const{shown:e,scrollToTop:t}=function(e){let{threshold:t}=e;const[n,l]=(0,a.useState)(!1),o=(0,a.useRef)(!1),{startScroll:r,cancelScroll:c}=(0,b.Ct)();return(0,b.RF)(((e,n)=>{let{scrollY:a}=e;const r=n?.scrollY;r&&(o.current?o.current=!1:a>=r?(c(),l(!1)):a<t?l(!1):a+window.innerHeight<document.documentElement.scrollHeight&&l(!0))})),(0,p.S)((e=>{e.location.hash&&(o.current=!0,l(!1))})),{shown:n,scrollToTop:()=>r(0)}}({threshold:300});return a.createElement("button",{"aria-label":(0,u.I)({id:"theme.BackToTopButton.buttonAriaLabel",message:"Scroll back to top",description:"The ARIA label for the back to top button"}),className:(0,l.Z)("clean-btn",r.k.common.backToTopButton,h.backToTopButton,e&&h.backToTopButtonShow),type:"button",onClick:t})}var f=n(6550),g=n(7524),k=n(6668),v=n(1327),_=n(7462);function C(e){return a.createElement("svg",(0,_.Z)({width:"20",height:"20","aria-hidden":"true"},e),a.createElement("g",{fill:"#7a7a7a"},a.createElement("path",{d:"M9.992 10.023c0 .2-.062.399-.172.547l-4.996 7.492a.982.982 0 01-.828.454H1c-.55 0-1-.453-1-1 0-.2.059-.403.168-.551l4.629-6.942L.168 3.078A.939.939 0 010 2.528c0-.548.45-.997 1-.997h2.996c.352 0 .649.18.828.45L9.82 9.472c.11.148.172.347.172.55zm0 0"}),a.createElement("path",{d:"M19.98 10.023c0 .2-.058.399-.168.547l-4.996 7.492a.987.987 0 01-.828.454h-3c-.547 0-.996-.453-.996-1 
0-.2.059-.403.168-.551l4.625-6.942-4.625-6.945a.939.939 0 01-.168-.55 1 1 0 01.996-.997h3c.348 0 .649.18.828.45l4.996 7.492c.11.148.168.347.168.55zm0 0"})))}const S={collapseSidebarButton:"collapseSidebarButton_PEFL",collapseSidebarButtonIcon:"collapseSidebarButtonIcon_kv0_"};function I(e){let{onClick:t}=e;return a.createElement("button",{type:"button",title:(0,u.I)({id:"theme.docs.sidebar.collapseButtonTitle",message:"Collapse sidebar",description:"The title attribute for collapse button of doc sidebar"}),"aria-label":(0,u.I)({id:"theme.docs.sidebar.collapseButtonAriaLabel",message:"Collapse sidebar",description:"The title attribute for collapse button of doc sidebar"}),className:(0,l.Z)("button button--secondary button--outline",S.collapseSidebarButton),onClick:t},a.createElement(C,{className:S.collapseSidebarButtonIcon}))}var N=n(9689),T=n(902);const x=Symbol("EmptyContext"),Z=a.createContext(x);function B(e){let{children:t}=e;const[n,l]=(0,a.useState)(null),o=(0,a.useMemo)((()=>({expandedItem:n,setExpandedItem:l})),[n]);return a.createElement(Z.Provider,{value:o},t)}var y=n(6043),w=n(8596),L=n(9960),A=n(2389);function M(e){let{categoryLabel:t,onClick:n}=e;return a.createElement("button",{"aria-label":(0,u.I)({id:"theme.DocSidebarItem.toggleCollapsedCategoryAriaLabel",message:"Toggle the collapsible sidebar category '{label}'",description:"The ARIA label to toggle the collapsible sidebar category"},{label:t}),type:"button",className:"clean-btn menu__caret",onClick:n})}function F(e){let{item:t,onItemClick:n,activePath:o,level:c,index:s,...d}=e;const{items:m,label:u,collapsible:b,className:p,href:h}=t,{docs:{sidebar:{autoCollapseCategories:E}}}=(0,k.L)(),f=function(e){const t=(0,A.Z)();return(0,a.useMemo)((()=>e.href?e.href:!t&&e.collapsible?(0,i.Wl)(e):void 0),[e,t])}(t),g=(0,i._F)(t,o),v=(0,w.Mg)(h,o),{collapsed:C,setCollapsed:S}=(0,y.u)({initialState:()=>!!b&&(!g&&t.collapsed)}),{expandedItem:I,setExpandedItem:N}=function(){const 
e=(0,a.useContext)(Z);if(e===x)throw new T.i6("DocSidebarItemsExpandedStateProvider");return e}(),B=function(e){void 0===e&&(e=!C),N(e?null:s),S(e)};return function(e){let{isActive:t,collapsed:n,updateCollapsed:l}=e;const o=(0,T.D9)(t);(0,a.useEffect)((()=>{t&&!o&&n&&l(!1)}),[t,o,n,l])}({isActive:g,collapsed:C,updateCollapsed:B}),(0,a.useEffect)((()=>{b&&null!=I&&I!==s&&E&&S(!0)}),[b,I,s,S,E]),a.createElement("li",{className:(0,l.Z)(r.k.docs.docSidebarItemCategory,r.k.docs.docSidebarItemCategoryLevel(c),"menu__list-item",{"menu__list-item--collapsed":C},p)},a.createElement("div",{className:(0,l.Z)("menu__list-item-collapsible",{"menu__list-item-collapsible--active":v})},a.createElement(L.Z,(0,_.Z)({className:(0,l.Z)("menu__link",{"menu__link--sublist":b,"menu__link--sublist-caret":!h&&b,"menu__link--active":g}),onClick:b?e=>{n?.(t),h?B(!1):(e.preventDefault(),B())}:()=>{n?.(t)},"aria-current":v?"page":void 0,"aria-expanded":b?!C:void 0,href:b?f??"#":f},d),u),h&&b&&a.createElement(M,{categoryLabel:u,onClick:e=>{e.preventDefault(),B()}})),a.createElement(y.z,{lazy:!0,as:"ul",className:"menu__list",collapsed:C},a.createElement(K,{items:m,tabIndex:C?-1:0,onItemClick:n,activePath:o,level:c+1})))}var H=n(3919),P=n(9471);const W={menuExternalLink:"menuExternalLink_NmtK"};function D(e){let{item:t,onItemClick:n,activePath:o,level:c,index:s,...d}=e;const{href:m,label:u,className:b,autoAddBaseUrl:p}=t,h=(0,i._F)(t,o),E=(0,H.Z)(m);return a.createElement("li",{className:(0,l.Z)(r.k.docs.docSidebarItemLink,r.k.docs.docSidebarItemLinkLevel(c),"menu__list-item",b),key:u},a.createElement(L.Z,(0,_.Z)({className:(0,l.Z)("menu__link",!E&&W.menuExternalLink,{"menu__link--active":h}),autoAddBaseUrl:p,"aria-current":h?"page":void 0,to:m},E&&{onClick:n?()=>n(t):void 0},d),u,!E&&a.createElement(P.Z,null)))}const R={menuHtmlItem:"menuHtmlItem_M9Kj"};function V(e){let{item:t,level:n,index:o}=e;const{value:c,defaultStyle:i,className:s}=t;return 
a.createElement("li",{className:(0,l.Z)(r.k.docs.docSidebarItemLink,r.k.docs.docSidebarItemLinkLevel(n),i&&[R.menuHtmlItem,"menu__list-item"],s),key:o,dangerouslySetInnerHTML:{__html:c}})}function z(e){let{item:t,...n}=e;switch(t.type){case"category":return a.createElement(F,(0,_.Z)({item:t},n));case"html":return a.createElement(V,(0,_.Z)({item:t},n));default:return a.createElement(D,(0,_.Z)({item:t},n))}}function U(e){let{items:t,...n}=e;return a.createElement(B,null,t.map(((e,t)=>a.createElement(z,(0,_.Z)({key:t,item:e,index:t},n)))))}const K=(0,a.memo)(U),j={menu:"menu_SIkG",menuWithAnnouncementBar:"menuWithAnnouncementBar_GW3s"};function q(e){let{path:t,sidebar:n,className:o}=e;const c=function(){const{isActive:e}=(0,N.nT)(),[t,n]=(0,a.useState)(e);return(0,b.RF)((t=>{let{scrollY:a}=t;e&&n(0===a)}),[e]),e&&t}();return a.createElement("nav",{"aria-label":(0,u.I)({id:"theme.docs.sidebar.navAriaLabel",message:"Docs sidebar",description:"The ARIA label for the sidebar navigation"}),className:(0,l.Z)("menu thin-scrollbar",j.menu,c&&j.menuWithAnnouncementBar,o)},a.createElement("ul",{className:(0,l.Z)(r.k.docs.docSidebarMenu,"menu__list")},a.createElement(K,{items:n,activePath:t,level:1})))}const G="sidebar_njMd",Y="sidebarWithHideableNavbar_wUlq",O="sidebarHidden_VK0M",X="sidebarLogo_isFc";function J(e){let{path:t,sidebar:n,onCollapse:o,isHidden:r}=e;const{navbar:{hideOnScroll:c},docs:{sidebar:{hideable:i}}}=(0,k.L)();return a.createElement("div",{className:(0,l.Z)(G,c&&Y,r&&O)},c&&a.createElement(v.Z,{tabIndex:-1,className:X}),a.createElement(q,{path:t,sidebar:n}),i&&a.createElement(I,{onClick:o}))}const Q=a.memo(J);var $=n(3102),ee=n(3163);const te=e=>{let{sidebar:t,path:n}=e;const o=(0,ee.e)();return a.createElement("ul",{className:(0,l.Z)(r.k.docs.docSidebarMenu,"menu__list")},a.createElement(K,{items:t,activePath:n,onItemClick:e=>{"category"===e.type&&e.href&&o.toggle(),"link"===e.type&&o.toggle()},level:1}))};function ne(e){return 
a.createElement($.Zo,{component:te,props:e})}const ae=a.memo(ne);function le(e){const t=(0,g.i)(),n="desktop"===t||"ssr"===t,l="mobile"===t;return a.createElement(a.Fragment,null,n&&a.createElement(Q,e),l&&a.createElement(ae,e))}const oe={expandButton:"expandButton_m80_",expandButtonIcon:"expandButtonIcon_BlDH"};function re(e){let{toggleSidebar:t}=e;return a.createElement("div",{className:oe.expandButton,title:(0,u.I)({id:"theme.docs.sidebar.expandButtonTitle",message:"Expand sidebar",description:"The ARIA label and title attribute for expand button of doc sidebar"}),"aria-label":(0,u.I)({id:"theme.docs.sidebar.expandButtonAriaLabel",message:"Expand sidebar",description:"The ARIA label and title attribute for expand button of doc sidebar"}),tabIndex:0,role:"button",onKeyDown:t,onClick:t},a.createElement(C,{className:oe.expandButtonIcon}))}const ce={docSidebarContainer:"docSidebarContainer_b6E3",docSidebarContainerHidden:"docSidebarContainerHidden_b3ry",sidebarViewport:"sidebarViewport_Xe31"};function ie(e){let{children:t}=e;const n=(0,d.V)();return a.createElement(a.Fragment,{key:n?.name??"noSidebar"},t)}function se(e){let{sidebar:t,hiddenSidebarContainer:n,setHiddenSidebarContainer:o}=e;const{pathname:c}=(0,f.TH)(),[i,s]=(0,a.useState)(!1),d=(0,a.useCallback)((()=>{i&&s(!1),o((e=>!e))}),[o,i]);return a.createElement("aside",{className:(0,l.Z)(r.k.docs.docSidebarContainer,ce.docSidebarContainer,n&&ce.docSidebarContainerHidden),onTransitionEnd:e=>{e.currentTarget.classList.contains(ce.docSidebarContainer)&&n&&s(!0)}},a.createElement(ie,null,a.createElement("div",{className:(0,l.Z)(ce.sidebarViewport,i&&ce.sidebarViewportHidden)},a.createElement(le,{sidebar:t,path:c,onCollapse:d,isHidden:i}),i&&a.createElement(re,{toggleSidebar:d}))))}const de={docMainContainer:"docMainContainer_gTbr",docMainContainerEnhanced:"docMainContainerEnhanced_Uz_u",docItemWrapperEnhanced:"docItemWrapperEnhanced_czyv"};function me(e){let{hiddenSidebarContainer:t,children:n}=e;const 
o=(0,d.V)();return a.createElement("main",{className:(0,l.Z)(de.docMainContainer,(t||!o)&&de.docMainContainerEnhanced)},a.createElement("div",{className:(0,l.Z)("container padding-top--md padding-bottom--lg",de.docItemWrapper,t&&de.docItemWrapperEnhanced)},n))}const ue={docPage:"docPage__5DB",docsWrapper:"docsWrapper_BCFX"};function be(e){let{children:t}=e;const n=(0,d.V)(),[l,o]=(0,a.useState)(!1);return a.createElement(m.Z,{wrapperClassName:ue.docsWrapper},a.createElement(E,null),a.createElement("div",{className:ue.docPage},n&&a.createElement(se,{sidebar:n.items,hiddenSidebarContainer:l,setHiddenSidebarContainer:o}),a.createElement(me,{hiddenSidebarContainer:l},t)))}var pe=n(4972),he=n(197);function Ee(e){const{versionMetadata:t}=e;return a.createElement(a.Fragment,null,a.createElement(he.Z,{version:t.version,tag:(0,c.os)(t.pluginId,t.version)}),a.createElement(o.d,null,t.noIndex&&a.createElement("meta",{name:"robots",content:"noindex, nofollow"})))}function fe(e){const{versionMetadata:t}=e,n=(0,i.hI)(e);if(!n)return a.createElement(pe.default,null);const{docElement:c,sidebarName:m,sidebarItems:u}=n;return a.createElement(a.Fragment,null,a.createElement(Ee,e),a.createElement(o.FG,{className:(0,l.Z)(r.k.wrapper.docsPages,r.k.page.docsDocPage,e.versionMetadata.className)},a.createElement(s.q,{version:t},a.createElement(d.b,{name:m,items:u},a.createElement(be,null,c)))))}},4972:(e,t,n)=>{n.r(t),n.d(t,{default:()=>c});var a=n(7294),l=n(5999),o=n(833),r=n(7452);function c(){return a.createElement(a.Fragment,null,a.createElement(o.d,{title:(0,l.I)({id:"theme.NotFound.title",message:"Page Not Found"})}),a.createElement(r.Z,null,a.createElement("main",{className:"container margin-vert--xl"},a.createElement("div",{className:"row"},a.createElement("div",{className:"col col--6 col--offset-3"},a.createElement("h1",{className:"hero__title"},a.createElement(l.Z,{id:"theme.NotFound.title",description:"The title of the 404 page"},"Page Not 
Found")),a.createElement("p",null,a.createElement(l.Z,{id:"theme.NotFound.p1",description:"The first paragraph of the 404 page"},"We could not find what you were looking for.")),a.createElement("p",null,a.createElement(l.Z,{id:"theme.NotFound.p2",description:"The 2nd paragraph of the 404 page"},"Please contact the owner of the site that linked you to the original URL and let them know their link is broken.")))))))}},4477:(e,t,n)=>{n.d(t,{E:()=>c,q:()=>r});var a=n(7294),l=n(902);const o=a.createContext(null);function r(e){let{children:t,version:n}=e;return a.createElement(o.Provider,{value:n},t)}function c(){const e=(0,a.useContext)(o);if(null===e)throw new l.i6("DocsVersionProvider");return e}}}]); \ No newline at end of file diff --git a/assets/js/1d4d4e46.53235266.js b/assets/js/1d4d4e46.53235266.js new file mode 100644 index 0000000..612bcad --- /dev/null +++ b/assets/js/1d4d4e46.53235266.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5623],{3905:(a,e,t)=>{t.d(e,{Zo:()=>f,kt:()=>d});var n=t(7294);function i(a,e,t){return e in a?Object.defineProperty(a,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):a[e]=t,a}function s(a,e){var t=Object.keys(a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(a);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),t.push.apply(t,n)}return t}function o(a){for(var e=1;e<arguments.length;e++){var t=null!=arguments[e]?arguments[e]:{};e%2?s(Object(t),!0).forEach((function(e){i(a,e,t[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(t,e))}))}return a}function r(a,e){if(null==a)return{};var t,n,i=function(a,e){if(null==a)return{};var t,n,i={},s=Object.keys(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||(i[t]=a[t]);return i}(a,e);if(Object.getOwnPropertySymbols){var 
s=Object.getOwnPropertySymbols(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(a,t)&&(i[t]=a[t])}return i}var p=n.createContext({}),l=function(a){var e=n.useContext(p),t=e;return a&&(t="function"==typeof a?a(e):o(o({},e),a)),t},f=function(a){var e=l(a.components);return n.createElement(p.Provider,{value:e},a.children)},c="mdxType",u={inlineCode:"code",wrapper:function(a){var e=a.children;return n.createElement(n.Fragment,{},e)}},k=n.forwardRef((function(a,e){var t=a.components,i=a.mdxType,s=a.originalType,p=a.parentName,f=r(a,["components","mdxType","originalType","parentName"]),c=l(t),k=i,d=c["".concat(p,".").concat(k)]||c[k]||u[k]||s;return t?n.createElement(d,o(o({ref:e},f),{},{components:t})):n.createElement(d,o({ref:e},f))}));function d(a,e){var t=arguments,i=e&&e.mdxType;if("string"==typeof a||i){var s=t.length,o=new Array(s);o[0]=k;var r={};for(var p in e)hasOwnProperty.call(e,p)&&(r[p]=e[p]);r.originalType=a,r[c]="string"==typeof a?a:i,o[1]=r;for(var l=2;l<s;l++)o[l]=t[l];return n.createElement.apply(null,o)}return n.createElement.apply(null,t)}k.displayName="MDXCreateElement"},7868:(a,e,t)=>{t.r(e),t.d(e,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>s,metadata:()=>r,toc:()=>l});var n=t(7462),i=(t(7294),t(3905));const s={},o="Using FastAPI to Run FastKafka Application",r={unversionedId:"guides/Guide_32_Using_fastapi_to_run_fastkafka_application",id:"version-0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",title:"Using FastAPI to Run FastKafka Application",description:"When deploying a FastKafka application, the default approach is 
to",source:"@site/versioned_docs/version-0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",sourceDirName:"guides",slug:"/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",permalink:"/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploying FastKafka using Docker",permalink:"/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka"},next:{title:"Benchmarking FastKafka app",permalink:"/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"1. Basic FastKafka app",id:"1-basic-fastkafka-app",level:2},{value:"2. Using fastapi_lifespan method",id:"2-using-fastapi_lifespan-method",level:2},{value:"Putting it all together",id:"putting-it-all-together",level:2}],f={toc:l},c="wrapper";function u(a){let{components:e,...t}=a;return(0,i.kt)(c,(0,n.Z)({},f,t,{components:e,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-fastapi-to-run-fastkafka-application"},"Using FastAPI to Run FastKafka Application"),(0,i.kt)("p",null,"When deploying a FastKafka application, the default approach is to\nutilize the ",(0,i.kt)("a",{parentName:"p",href:"/docs/cli/fastkafka#fastkafka-run"},(0,i.kt)("inlineCode",{parentName:"a"},"fastkafka run"))," CLI\ncommand. This command allows you to launch your FastKafka application as\na standalone service. However, if you already have a FastAPI application\nin place and wish to run FastKafka application alongside it, you have an\nalternative option."),(0,i.kt)("p",null,"FastKafka provides a method called ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka.fastapi_lifespan")," that\nleverages ",(0,i.kt)("a",{parentName:"p",href:"https://fastapi.tiangolo.com/advanced/events/#lifespan-events"},"FastAPI\u2019s\nlifespan"),"\nfeature. 
This method allows you to run your FastKafka application\ntogether with your existing FastAPI app, seamlessly integrating their\nfunctionalities. By using the ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka.fastapi_lifespan")," method, you\ncan start the FastKafka application within the same process as the\nFastAPI app."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka.fastapi_lifespan")," method ensures that both FastAPI and\nFastKafka are initialized and start working simultaneously. This\napproach enables the execution of Kafka-related tasks, such as producing\nand consuming messages, while also handling HTTP requests through\nFastAPI\u2019s routes."),(0,i.kt)("p",null,"By combining FastAPI and FastKafka in this manner, you can build a\ncomprehensive application that harnesses the power of both frameworks.\nWhether you require real-time messaging capabilities or traditional HTTP\nendpoints, this approach allows you to leverage the strengths of FastAPI\nand FastKafka within a single deployment setup."),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A basic knowledge of ",(0,i.kt)("inlineCode",{parentName:"li"},"FastKafka")," is needed to proceed with this\nguide. If you are not familiar with ",(0,i.kt)("inlineCode",{parentName:"li"},"FastKafka"),", please go through\nthe ",(0,i.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("inlineCode",{parentName:"li"},"FastKafka")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"FastAPI")," libraries needs to be installed.")),(0,i.kt)("p",null,"This guide will provide a step-by-step explanation, taking you through\neach stage individually, before combining all the components in the\nfinal section for a comprehensive understanding of the process."),(0,i.kt)("h2",{id:"1-basic-fastkafka-app"},"1. 
Basic FastKafka app"),(0,i.kt)("p",null,"In this step, we will begin by creating a simple FastKafka application."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Greetings",\n kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n return greeting\n')),(0,i.kt)("p",null,"In the above example, we consume messages from a topic called ",(0,i.kt)("inlineCode",{parentName:"p"},"names"),',\nwe prepend \u201cHello" to the message, and send it back to another topic\ncalled ',(0,i.kt)("inlineCode",{parentName:"p"},"greetings"),"."),(0,i.kt)("p",null,"We now have a simple ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," app to produce and consume from two\ntopics."),(0,i.kt)("h2",{id:"2-using-fastapi_lifespan-method"},"2. Using fastapi_lifespan method"),(0,i.kt)("p",null,"In this step of the guide, we will explore the integration of a\nFastKafka application with a FastAPI application using the\n",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka.fastapi_lifespan")," method. 
The ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka.fastapi_lifespan"),"\nmethod is a feature provided by FastKafka, which allows you to\nseamlessly integrate a FastKafka application with a FastAPI application\nby leveraging FastAPI\u2019s lifespan feature."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name="localhost"))\n\n\n@fastapi_app.get("/hello")\nasync def hello():\n return {"msg": "hello there"}\n')),(0,i.kt)("p",null,"In the above example, a new instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"FastAPI")," app is created,\nand when the app is started using uvicorn, it also runs the ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka"),"\napplication concurrently."),(0,i.kt)("h2",{id:"putting-it-all-together"},"Putting it all together"),(0,i.kt)("p",null,"Let\u2019s put the above code together and write it in a file called\n",(0,i.kt)("inlineCode",{parentName:"p"},"fast_apps.py"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "fast_apps.py" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Greetings",\n kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n return greeting\n\n\nfrom fastapi import 
FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan("localhost"))\n\n@fastapi_app.get("/hello")\nasync def hello():\n return {"msg": "hello there"}\n')),(0,i.kt)("p",null,"Finally, you can run the FastAPI application using a web server of your\nchoice, such as Uvicorn or Hypercorn by running the below command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"uvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080\n")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1efdbea1.365af71b.js b/assets/js/1efdbea1.365af71b.js new file mode 100644 index 0000000..911e083 --- /dev/null +++ b/assets/js/1efdbea1.365af71b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8914],{3905:(e,t,n)=>{n.d(t,{Zo:()=>h,kt:()=>m});var i=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function a(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?r(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,i,o=function(e,t){if(null==e)return{};var n,i,o={},r=Object.keys(e);for(i=0;i<r.length;i++)n=r[i],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i<r.length;i++)n=r[i],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=i.createContext({}),l=function(e){var t=i.useContext(c),n=t;return 
e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},h=function(e){var t=l(e.components);return i.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},p=i.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,c=e.parentName,h=s(e,["components","mdxType","originalType","parentName"]),d=l(n),p=o,m=d["".concat(c,".").concat(p)]||d[p]||u[p]||r;return n?i.createElement(m,a(a({ref:t},h),{},{components:n})):i.createElement(m,a({ref:t},h))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,a=new Array(r);a[0]=p;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[d]="string"==typeof e?e:o,a[1]=s;for(var l=2;l<r;l++)a[l]=n[l];return i.createElement.apply(null,a)}return i.createElement.apply(null,n)}p.displayName="MDXCreateElement"},6129:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>a,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var i=n(7462),o=(n(7294),n(3905));const r={},a=void 0,s={unversionedId:"LICENSE",id:"version-0.6.0/LICENSE",title:"LICENSE",description:"Apache License",source:"@site/versioned_docs/version-0.6.0/LICENSE.md",sourceDirName:".",slug:"/LICENSE",permalink:"/docs/0.6.0/LICENSE",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"run_fastkafka_server_process",permalink:"/docs/0.6.0/cli/run_fastkafka_server_process"},next:{title:"Contributing to fastkafka",permalink:"/docs/0.6.0/CONTRIBUTING"}},c={},l=[],h={toc:l},d="wrapper";function u(e){let{components:t,...n}=e;return(0,o.kt)(d,(0,i.Z)({},h,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Apache License\nVersion 2.0, January 2004\n",(0,o.kt)("a",{parentName:"p",href:"http://www.apache.org/licenses/"},"http://www.apache.org/licenses/")),(0,o.kt)("p",null," TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND 
DISTRIBUTION"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Definitions."),(0,o.kt)("p",{parentName:"li"},'"License" shall mean the terms and conditions for use, reproduction,\nand distribution as defined by Sections 1 through 9 of this document.'),(0,o.kt)("p",{parentName:"li"},'"Licensor" shall mean the copyright owner or entity authorized by\nthe copyright owner that is granting the License.'),(0,o.kt)("p",{parentName:"li"},'"Legal Entity" shall mean the union of the acting entity and all\nother entities that control, are controlled by, or are under common\ncontrol with that entity. For the purposes of this definition,\n"control" means (i) the power, direct or indirect, to cause the\ndirection or management of such entity, whether by contract or\notherwise, or (ii) ownership of fifty percent (50%) or more of the\noutstanding shares, or (iii) beneficial ownership of such entity.'),(0,o.kt)("p",{parentName:"li"},'"You" (or "Your") shall mean an individual or Legal Entity\nexercising permissions granted by this License.'),(0,o.kt)("p",{parentName:"li"},'"Source" form shall mean the preferred form for making modifications,\nincluding but not limited to software source code, documentation\nsource, and configuration files.'),(0,o.kt)("p",{parentName:"li"},'"Object" form shall mean any form resulting from mechanical\ntransformation or translation of a Source form, including but\nnot limited to compiled object code, generated documentation,\nand conversions to other media types.'),(0,o.kt)("p",{parentName:"li"},'"Work" shall mean the work of authorship, whether in Source or\nObject form, made available under the License, as indicated by a\ncopyright notice that is included in or attached to the work\n(an example is provided in the Appendix below).'),(0,o.kt)("p",{parentName:"li"},'"Derivative Works" shall mean any work, whether in Source or Object\nform, that is based on (or derived from) the Work and for which the\neditorial 
revisions, annotations, elaborations, or other modifications\nrepresent, as a whole, an original work of authorship. For the purposes\nof this License, Derivative Works shall not include works that remain\nseparable from, or merely link (or bind by name) to the interfaces of,\nthe Work and Derivative Works thereof.'),(0,o.kt)("p",{parentName:"li"},'"Contribution" shall mean any work of authorship, including\nthe original version of the Work and any modifications or additions\nto that Work or Derivative Works thereof, that is intentionally\nsubmitted to Licensor for inclusion in the Work by the copyright owner\nor by an individual or Legal Entity authorized to submit on behalf of\nthe copyright owner. For the purposes of this definition, "submitted"\nmeans any form of electronic, verbal, or written communication sent\nto the Licensor or its representatives, including but not limited to\ncommunication on electronic mailing lists, source code control systems,\nand issue tracking systems that are managed by, or on behalf of, the\nLicensor for the purpose of discussing and improving the Work, but\nexcluding communication that is conspicuously marked or otherwise\ndesignated in writing by the copyright owner as "Not a Contribution."'),(0,o.kt)("p",{parentName:"li"},'"Contributor" shall mean Licensor and any individual or Legal Entity\non behalf of whom a Contribution has been received by Licensor and\nsubsequently incorporated within the Work.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Copyright License. 
Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\ncopyright license to reproduce, prepare Derivative Works of,\npublicly display, publicly perform, sublicense, and distribute the\nWork and such Derivative Works in Source or Object form.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Patent License. Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\n(except as stated in this section) patent license to make, have made,\nuse, offer to sell, sell, import, and otherwise transfer the Work,\nwhere such license applies only to those patent claims licensable\nby such Contributor that are necessarily infringed by their\nContribution(s) alone or by combination of their Contribution(s)\nwith the Work to which such Contribution(s) was submitted. If You\ninstitute patent litigation against any entity (including a\ncross-claim or counterclaim in a lawsuit) alleging that the Work\nor a Contribution incorporated within the Work constitutes direct\nor contributory patent infringement, then any patent licenses\ngranted to You under this License for that Work shall terminate\nas of the date such litigation is filed.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Redistribution. 
You may reproduce and distribute copies of the\nWork or Derivative Works thereof in any medium, with or without\nmodifications, and in Source or Object form, provided that You\nmeet the following conditions:"),(0,o.kt)("p",{parentName:"li"},"(a) You must give any other recipients of the Work or\nDerivative Works a copy of this License; and"),(0,o.kt)("p",{parentName:"li"},"(b) You must cause any modified files to carry prominent notices\nstating that You changed the files; and"),(0,o.kt)("p",{parentName:"li"},"(c) You must retain, in the Source form of any Derivative Works\nthat You distribute, all copyright, patent, trademark, and\nattribution notices from the Source form of the Work,\nexcluding those notices that do not pertain to any part of\nthe Derivative Works; and"),(0,o.kt)("p",{parentName:"li"},'(d) If the Work includes a "NOTICE" text file as part of its\ndistribution, then any Derivative Works that You distribute must\ninclude a readable copy of the attribution notices contained\nwithin such NOTICE file, excluding those notices that do not\npertain to any part of the Derivative Works, in at least one\nof the following places: within a NOTICE text file distributed\nas part of the Derivative Works; within the Source form or\ndocumentation, if provided along with the Derivative Works; or,\nwithin a display generated by the Derivative Works, if and\nwherever such third-party notices normally appear. The contents\nof the NOTICE file are for informational purposes only and\ndo not modify the License. 
You may add Your own attribution\nnotices within Derivative Works that You distribute, alongside\nor as an addendum to the NOTICE text from the Work, provided\nthat such additional attribution notices cannot be construed\nas modifying the License.'),(0,o.kt)("p",{parentName:"li"},"You may add Your own copyright statement to Your modifications and\nmay provide additional or different license terms and conditions\nfor use, reproduction, or distribution of Your modifications, or\nfor any such Derivative Works as a whole, provided Your use,\nreproduction, and distribution of the Work otherwise complies with\nthe conditions stated in this License.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Submission of Contributions. Unless You explicitly state otherwise,\nany Contribution intentionally submitted for inclusion in the Work\nby You to the Licensor shall be under the terms and conditions of\nthis License, without any additional terms or conditions.\nNotwithstanding the above, nothing herein shall supersede or modify\nthe terms of any separate license agreement you may have executed\nwith Licensor regarding such Contributions.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Trademarks. This License does not grant permission to use the trade\nnames, trademarks, service marks, or product names of the Licensor,\nexcept as required for reasonable and customary use in describing the\norigin of the Work and reproducing the content of the NOTICE file.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},'Disclaimer of Warranty. Unless required by applicable law or\nagreed to in writing, Licensor provides the Work (and each\nContributor provides its Contributions) on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\nimplied, including, without limitation, any warranties or conditions\nof TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\nPARTICULAR PURPOSE. 
You are solely responsible for determining the\nappropriateness of using or redistributing the Work and assume any\nrisks associated with Your exercise of permissions under this License.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Limitation of Liability. In no event and under no legal theory,\nwhether in tort (including negligence), contract, or otherwise,\nunless required by applicable law (such as deliberate and grossly\nnegligent acts) or agreed to in writing, shall any Contributor be\nliable to You for damages, including any direct, indirect, special,\nincidental, or consequential damages of any character arising as a\nresult of this License or out of the use or inability to use the\nWork (including but not limited to damages for loss of goodwill,\nwork stoppage, computer failure or malfunction, or any and all\nother commercial damages or losses), even if such Contributor\nhas been advised of the possibility of such damages.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Accepting Warranty or Additional Liability. While redistributing\nthe Work or Derivative Works thereof, You may choose to offer,\nand charge a fee for, acceptance of support, warranty, indemnity,\nor other liability obligations and/or rights consistent with this\nLicense. 
However, in accepting such obligations, You may act only\non Your own behalf and on Your sole responsibility, not on behalf\nof any other Contributor, and only if You agree to indemnify,\ndefend, and hold each Contributor harmless for any liability\nincurred by, or claims asserted against, such Contributor by reason\nof your accepting any such warranty or additional liability."),(0,o.kt)("p",{parentName:"li"},"END OF TERMS AND CONDITIONS"),(0,o.kt)("p",{parentName:"li"},"APPENDIX: How to apply the Apache License to your work."),(0,o.kt)("p",{parentName:"li"},' To apply the Apache License to your work, attach the following\nboilerplate notice, with the fields enclosed by brackets "[]"\nreplaced with your own identifying information. (Don\'t include\nthe brackets!) The text should be enclosed in the appropriate\ncomment syntax for the file format. We also recommend that a\nfile or class name and description of purpose be included on the\nsame "printed page" as the copyright notice for easier\nidentification within third-party archives.'),(0,o.kt)("p",{parentName:"li"},"Copyright ","[yyyy][name of copyright owner]"),(0,o.kt)("p",{parentName:"li"},'Licensed under the Apache License, Version 2.0 (the "License");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at'),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre"},"http://www.apache.org/licenses/LICENSE-2.0\n")),(0,o.kt)("p",{parentName:"li"},'Unless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.'))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1f0a946f.fcc5a3d2.js b/assets/js/1f0a946f.fcc5a3d2.js new file mode 100644 index 0000000..e8ca16c --- /dev/null +++ 
b/assets/js/1f0a946f.fcc5a3d2.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3927],{3905:(a,e,t)=>{t.d(e,{Zo:()=>f,kt:()=>d});var n=t(7294);function i(a,e,t){return e in a?Object.defineProperty(a,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):a[e]=t,a}function s(a,e){var t=Object.keys(a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(a);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),t.push.apply(t,n)}return t}function o(a){for(var e=1;e<arguments.length;e++){var t=null!=arguments[e]?arguments[e]:{};e%2?s(Object(t),!0).forEach((function(e){i(a,e,t[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(t,e))}))}return a}function r(a,e){if(null==a)return{};var t,n,i=function(a,e){if(null==a)return{};var t,n,i={},s=Object.keys(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||(i[t]=a[t]);return i}(a,e);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(a,t)&&(i[t]=a[t])}return i}var p=n.createContext({}),l=function(a){var e=n.useContext(p),t=e;return a&&(t="function"==typeof a?a(e):o(o({},e),a)),t},f=function(a){var e=l(a.components);return n.createElement(p.Provider,{value:e},a.children)},k="mdxType",c={inlineCode:"code",wrapper:function(a){var e=a.children;return n.createElement(n.Fragment,{},e)}},u=n.forwardRef((function(a,e){var t=a.components,i=a.mdxType,s=a.originalType,p=a.parentName,f=r(a,["components","mdxType","originalType","parentName"]),k=l(t),u=i,d=k["".concat(p,".").concat(u)]||k[u]||c[u]||s;return t?n.createElement(d,o(o({ref:e},f),{},{components:t})):n.createElement(d,o({ref:e},f))}));function d(a,e){var t=arguments,i=e&&e.mdxType;if("string"==typeof a||i){var s=t.length,o=new 
Array(s);o[0]=u;var r={};for(var p in e)hasOwnProperty.call(e,p)&&(r[p]=e[p]);r.originalType=a,r[k]="string"==typeof a?a:i,o[1]=r;for(var l=2;l<s;l++)o[l]=t[l];return n.createElement.apply(null,o)}return n.createElement.apply(null,t)}u.displayName="MDXCreateElement"},8599:(a,e,t)=>{t.r(e),t.d(e,{assets:()=>p,contentTitle:()=>o,default:()=>c,frontMatter:()=>s,metadata:()=>r,toc:()=>l});var n=t(7462),i=(t(7294),t(3905));const s={},o="Using FastAPI to Run FastKafka Application",r={unversionedId:"guides/Guide_32_Using_fastapi_to_run_fastkafka_application",id:"version-0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",title:"Using FastAPI to Run FastKafka Application",description:"When deploying a FastKafka application, the default approach is to",source:"@site/versioned_docs/version-0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",sourceDirName:"guides",slug:"/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",permalink:"/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploying FastKafka using Docker",permalink:"/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka"},next:{title:"Benchmarking FastKafka app",permalink:"/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"1. Basic FastKafka app",id:"1-basic-fastkafka-app",level:2},{value:"2. 
Using fastapi_lifespan method",id:"2-using-fastapi_lifespan-method",level:2},{value:"Putting it all together",id:"putting-it-all-together",level:2}],f={toc:l},k="wrapper";function c(a){let{components:e,...t}=a;return(0,i.kt)(k,(0,n.Z)({},f,t,{components:e,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-fastapi-to-run-fastkafka-application"},"Using FastAPI to Run FastKafka Application"),(0,i.kt)("p",null,"When deploying a FastKafka application, the default approach is to\nutilize the ",(0,i.kt)("a",{parentName:"p",href:"/docs/cli/fastkafka#fastkafka-run"},(0,i.kt)("inlineCode",{parentName:"a"},"fastkafka run"))," CLI\ncommand. This command allows you to launch your FastKafka application as\na standalone service. However, if you already have a FastAPI application\nin place and wish to run FastKafka application alongside it, you have an\nalternative option."),(0,i.kt)("p",null,"FastKafka provides a method called\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nthat leverages ",(0,i.kt)("a",{parentName:"p",href:"https://fastapi.tiangolo.com/advanced/events/#lifespan-events"},"FastAPI\u2019s\nlifespan"),"\nfeature. This method allows you to run your FastKafka application\ntogether with your existing FastAPI app, seamlessly integrating their\nfunctionalities. 
By using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod, you can start the FastKafka application within the same process\nas the FastAPI app."),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod ensures that both FastAPI and FastKafka are initialized and start\nworking simultaneously. This approach enables the execution of\nKafka-related tasks, such as producing and consuming messages, while\nalso handling HTTP requests through FastAPI\u2019s routes."),(0,i.kt)("p",null,"By combining FastAPI and FastKafka in this manner, you can build a\ncomprehensive application that harnesses the power of both frameworks.\nWhether you require real-time messaging capabilities or traditional HTTP\nendpoints, this approach allows you to leverage the strengths of FastAPI\nand FastKafka within a single deployment setup."),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A basic knowledge of\n",(0,i.kt)("a",{parentName:"li",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nis needed to proceed with this guide. 
If you are not familiar with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),",\nplease go through the ",(0,i.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("a",{parentName:"li",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nand ",(0,i.kt)("inlineCode",{parentName:"li"},"FastAPI")," libraries needs to be installed.")),(0,i.kt)("p",null,"This guide will provide a step-by-step explanation, taking you through\neach stage individually, before combining all the components in the\nfinal section for a comprehensive understanding of the process."),(0,i.kt)("h2",{id:"1-basic-fastkafka-app"},"1. Basic FastKafka app"),(0,i.kt)("p",null,"In this step, we will begin by creating a simple FastKafka application."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Greetings",\n kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n return greeting\n')),(0,i.kt)("p",null,"In the above example, we consume messages from a topic called ",(0,i.kt)("inlineCode",{parentName:"p"},"names"),',\nwe prepend \u201cHello" to the message, and send it back to another 
topic\ncalled ',(0,i.kt)("inlineCode",{parentName:"p"},"greetings"),"."),(0,i.kt)("p",null,"We now have a simple\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp to produce and consume from two topics."),(0,i.kt)("h2",{id:"2-using-fastapi_lifespan-method"},"2. Using fastapi_lifespan method"),(0,i.kt)("p",null,"In this step of the guide, we will explore the integration of a\nFastKafka application with a FastAPI application using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod. The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod is a feature provided by FastKafka, which allows you to\nseamlessly integrate a FastKafka application with a FastAPI application\nby leveraging FastAPI\u2019s lifespan feature."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name="localhost"))\n\n\n@fastapi_app.get("/hello")\nasync def hello():\n return {"msg": "hello there"}\n')),(0,i.kt)("p",null,"In the above example, a new instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"FastAPI")," app is created,\nand when the app is started using uvicorn, it also runs the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napplication concurrently."),(0,i.kt)("h2",{id:"putting-it-all-together"},"Putting it all together"),(0,i.kt)("p",null,"Let\u2019s put the above code together and write it in a file 
called\n",(0,i.kt)("inlineCode",{parentName:"p"},"fast_apps.py"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "fast_apps.py" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Greetings",\n kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n return greeting\n\n\nfrom fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan("localhost"))\n\n@fastapi_app.get("/hello")\nasync def hello():\n return {"msg": "hello there"}\n')),(0,i.kt)("p",null,"Finally, you can run the FastAPI application using a web server of your\nchoice, such as Uvicorn or Hypercorn by running the below command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"uvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080\n")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1f1765ab.bbeac427.js b/assets/js/1f1765ab.bbeac427.js new file mode 100644 index 0000000..9bd09b4 --- /dev/null +++ b/assets/js/1f1765ab.bbeac427.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3645],{3905:(e,n,t)=>{t.d(n,{Zo:()=>l,kt:()=>k});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var 
t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function c(e){for(var n=1;n<arguments.length;n++){var t=null!=arguments[n]?arguments[n]:{};n%2?a(Object(t),!0).forEach((function(n){o(e,n,t[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):a(Object(t)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(t,n))}))}return e}function i(e,n){if(null==e)return{};var t,r,o=function(e,n){if(null==e)return{};var t,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)t=a[r],n.indexOf(t)>=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)t=a[r],n.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var s=r.createContext({}),d=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):c(c({},n),e)),t},l=function(e){var n=d(e.components);return r.createElement(s.Provider,{value:n},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},u=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,l=i(e,["components","mdxType","originalType","parentName"]),p=d(t),u=o,k=p["".concat(s,".").concat(u)]||p[u]||f[u]||a;return t?r.createElement(k,c(c({ref:n},l),{},{components:t})):r.createElement(k,c({ref:n},l))}));function k(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var a=t.length,c=new Array(a);c[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:o,c[1]=i;for(var d=2;d<a;d++)c[d]=t[d];return r.createElement.apply(null,c)}return 
r.createElement.apply(null,t)}u.displayName="MDXCreateElement"},6409:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>c,default:()=>f,frontMatter:()=>a,metadata:()=>i,toc:()=>d});var r=t(7462),o=(t(7294),t(3905));const a={},c=void 0,i={unversionedId:"api/fastkafka/encoder/json_encoder",id:"version-0.7.0/api/fastkafka/encoder/json_encoder",title:"json_encoder",description:"fastkafka.encoder.jsonencoder {fastkafka.encoder.jsonencoder}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/json_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_encoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/json_encoder",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_decoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/json_decoder"},next:{title:"DynamicTaskExecutor",permalink:"/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor"}},s={},d=[{value:"<code>fastkafka.encoder.json_encoder</code>",id:"fastkafka.encoder.json_encoder",level:2},{value:"<code>json_encoder</code>",id:"json_encoder",level:3}],l={toc:d},p="wrapper";function f(e){let{components:n,...t}=e;return(0,o.kt)(p,(0,r.Z)({},l,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"fastkafka.encoder.json_encoder"},(0,o.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.json_encoder")),(0,o.kt)("h3",{id:"json_encoder"},(0,o.kt)("inlineCode",{parentName:"h3"},"json_encoder")),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"def json_encoder(msg: pydantic.main.BaseModel) -> bytes")),(0,o.kt)("p",null,"Encoder to encode pydantic instances to json string"),(0,o.kt)("p",null,(0,o.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"msg"),": An instance of pydantic basemodel")),(0,o.kt)("p",null,(0,o.kt)("strong",{parentName:"p"},"Returns"),":"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Json string in bytes which 
is encoded from pydantic basemodel")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/205a719b.fba55762.js b/assets/js/205a719b.fba55762.js new file mode 100644 index 0000000..859569c --- /dev/null +++ b/assets/js/205a719b.fba55762.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5414],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},d=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var 
a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(a),u=r,f=c["".concat(s,".").concat(u)]||c[u]||k[u]||o;return a?n.createElement(f,i(i({ref:t},d),{},{components:a})):n.createElement(f,i({ref:t},d))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:r,i[1]=l;for(var p=2;p<o;p++)i[p]=a[p];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},593:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>k,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},i=void 0,l={unversionedId:"api/fastkafka/testing/LocalRedpandaBroker",id:"version-0.7.1/api/fastkafka/testing/LocalRedpandaBroker",title:"LocalRedpandaBroker",description:"fastkafka.testing.LocalRedpandaBroker {fastkafka.testing.LocalRedpandaBroker}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/testing/LocalRedpandaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/LocalRedpandaBroker",permalink:"/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"ApacheKafkaBroker",permalink:"/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker"},next:{title:"Tester",permalink:"/docs/0.7.1/api/fastkafka/testing/Tester"}},s={},p=[{value:"<code>fastkafka.testing.LocalRedpandaBroker</code>",id:"fastkafka.testing.LocalRedpandaBroker",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>get_service_config_string</code>",id:"get_service_config_string",level:3},{value:"<code>start</code>",id:"start",level:3},{value:"<code>stop</code>",id:"stop",level:3}],d={toc:p},c="wrapper";function 
k(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.testing.LocalRedpandaBroker"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.LocalRedpandaBroker")),(0,r.kt)("p",null,"LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing."),(0,r.kt)("h3",{id:"init"},(0,r.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None")),(0,r.kt)("p",null,"Initialises the LocalRedpandaBroker object"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"tag"),": Tag of Redpanda image to use to start container"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"seastar_core"),": Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"memory"),": The amount of memory to make available to Redpanda"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"mode"),": Mode to use to load configuration properties in container"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"default_log_level"),": Log levels to use for Redpanda")),(0,r.kt)("h3",{id:"get_service_config_string"},(0,r.kt)("inlineCode",{parentName:"h3"},"get_service_config_string")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def 
get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str")),(0,r.kt)("p",null,"Generates a configuration for a service"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"service"),': "redpanda", defines which service to get config string for')),(0,r.kt)("h3",{id:"start"},(0,r.kt)("inlineCode",{parentName:"h3"},"start")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def start(self: fastkafka.testing.LocalRedpandaBroker) -> str")),(0,r.kt)("p",null,"Starts a local redpanda broker instance synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Redpanda broker bootstrap server address in string format: add:port")),(0,r.kt)("h3",{id:"stop"},(0,r.kt)("inlineCode",{parentName:"h3"},"stop")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None")),(0,r.kt)("p",null,"Stops a local redpanda broker instance synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"None")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/20f8c1fd.f55a6b8d.js b/assets/js/20f8c1fd.f55a6b8d.js new file mode 100644 index 0000000..c8dddd6 --- /dev/null +++ b/assets/js/20f8c1fd.f55a6b8d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8215],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var 
n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function f(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),l=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},c=function(e){var t=l(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,c=f(e,["components","mdxType","originalType","parentName"]),p=l(a),u=r,d=p["".concat(s,".").concat(u)]||p[u]||k[u]||o;return a?n.createElement(d,i(i({ref:t},c),{},{components:a})):n.createElement(d,i({ref:t},c))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=u;var f={};for(var s in t)hasOwnProperty.call(t,s)&&(f[s]=t[s]);f.originalType=e,f[p]="string"==typeof e?e:r,i[1]=f;for(var l=2;l<o;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},7918:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>k,frontMatter:()=>o,metadata:()=>f,toc:()=>l});var 
n=a(7462),r=(a(7294),a(3905));const o={},i=void 0,f={unversionedId:"api/fastkafka/KafkaEvent",id:"version-0.7.1/api/fastkafka/KafkaEvent",title:"KafkaEvent",description:"fastkafka.KafkaEvent {fastkafka.KafkaEvent}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/KafkaEvent.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/KafkaEvent",permalink:"/docs/0.7.1/api/fastkafka/KafkaEvent",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/0.7.1/api/fastkafka/"},next:{title:"AvroBase",permalink:"/docs/0.7.1/api/fastkafka/encoder/AvroBase"}},s={},l=[{value:"<code>fastkafka.KafkaEvent</code>",id:"fastkafka.KafkaEvent",level:2}],c={toc:l},p="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.KafkaEvent")),(0,r.kt)("p",null,"A generic class for representing Kafka events. 
Based on BaseSubmodel, bound to pydantic.BaseModel"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"message"),": The message contained in the Kafka event, can be of type pydantic.BaseModel."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"key"),": The optional key used to identify the Kafka event.")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/222e7c49.9c8fb23c.js b/assets/js/222e7c49.9c8fb23c.js new file mode 100644 index 0000000..b247a94 --- /dev/null +++ b/assets/js/222e7c49.9c8fb23c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1733],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>m});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function i(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?s(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function r(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},s=Object.keys(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),l=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof 
e?e(a):i(i({},a),e)),t},c=function(e){var a=l(e.components);return n.createElement(p.Provider,{value:a},e.children)},k="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},f=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,s=e.originalType,p=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),k=l(t),f=o,m=k["".concat(p,".").concat(f)]||k[f]||d[f]||s;return t?n.createElement(m,i(i({ref:a},c),{},{components:t})):n.createElement(m,i({ref:a},c))}));function m(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=t.length,i=new Array(s);i[0]=f;var r={};for(var p in a)hasOwnProperty.call(a,p)&&(r[p]=a[p]);r.originalType=e,r[k]="string"==typeof e?e:o,i[1]=r;for(var l=2;l<s;l++)i[l]=t[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,t)}f.displayName="MDXCreateElement"},71:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>i,default:()=>d,frontMatter:()=>s,metadata:()=>r,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},i="FastKafka tutorial",r={unversionedId:"guides/Guide_00_FastKafka_Demo",id:"version-0.5.0/guides/Guide_00_FastKafka_Demo",title:"FastKafka tutorial",description:"FastKafka is a powerful and easy-to-use Python",source:"@site/versioned_docs/version-0.5.0/guides/Guide_00_FastKafka_Demo.md",sourceDirName:"guides",slug:"/guides/Guide_00_FastKafka_Demo",permalink:"/docs/0.5.0/guides/Guide_00_FastKafka_Demo",draft:!1,tags:[],version:"0.5.0",frontMatter:{}},p={},l=[{value:"Install",id:"install",level:2},{value:"Running in Colab",id:"running-in-colab",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the 
service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2}],c={toc:l},k="wrapper";function d(e){let{components:a,...t}=e;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka-tutorial"},"FastKafka tutorial"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use Python\nlibrary for building asynchronous services that interact with Kafka\ntopics. Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"try:\n import fastkafka\nexcept:\n ! 
pip install fastkafka\n")),(0,o.kt)("h2",{id:"running-in-colab"},"Running in Colab"),(0,o.kt)("p",null,"You can start this interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open In Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"Here is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic."),(0,o.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,o.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. The following call downloads\nthe dataset and trains the model."),(0,o.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/types/#constrained-types"},(0,o.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,o.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,o.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,o.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,o.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts Kafka broker and zookeeper."),(0,o.kt)("p",null,"Before running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka testing install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop 
shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a Iris classification model and encapulated it into our\nfastkafka application. 
The app will consume the IrisInputData from the\n",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,o.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purpuoses")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent IrisInputData message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to IrisInputData message"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin ",(0,o.kt)("inlineCode",{parentName:"p"},"faskafka run")," CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal 
length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("p",null,"To run the service, you will need a running Kafka broker on localhost as\nspecified in the ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")," parameter above. 
We can start the Kafka\nbroker locally using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),".\nNotice that the same happens automatically in the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\nas shown above."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n'127.0.0.1:9092'\n")),(0,o.kt)("p",null,"Then, we start the FastKafka service by running the following command in\nthe folder where the ",(0,o.kt)("inlineCode",{parentName:"p"},"application.py")," file is located:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=2 --kafka-broker 
localhost application:kafka_app\n")),(0,o.kt)("p",null,"In the above command, we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--num-workers")," option to specify how many\nworkers to launch and we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--kafka-broker")," option to specify which\nkafka broker configuration to use from earlier specified ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n^C\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
stopped.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n")),(0,o.kt)("p",null,"You need to interupt running of the cell above by selecting\n",(0,o.kt)("inlineCode",{parentName:"p"},"Runtime->Interupt execution")," on the toolbar above."),(0,o.kt)("p",null,"Finally, we can stop the local Kafka Broker:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"When running in Colab, we need to update Node.js first:"),(0,o.kt)("p",null,"We need to install all dependancies for the generator using the\nfollowing command line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation 
programatically you just need to call the\nfolloving command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n")),(0,o.kt)("p",null,". This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\ndrwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n^C\nInterupting serving of documentation and cleaning up...\n")),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n 
bootstrap_servers="localhost:9092",\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/232ab88c.c37b9f14.js b/assets/js/232ab88c.c37b9f14.js new file mode 100644 index 0000000..f8ec960 --- /dev/null +++ b/assets/js/232ab88c.c37b9f14.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7100],{3905:(e,a,n)=>{n.d(a,{Zo:()=>k,kt:()=>m});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a<arguments.length;a++){var 
n=null!=arguments[a]?arguments[a]:{};a%2?r(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function l(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var n,t,o={},r=Object.keys(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},k=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},f=t.forwardRef((function(e,a){var n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(n),f=o,m=c["".concat(i,".").concat(f)]||c[f]||u[f]||r;return n?t.createElement(m,s(s({ref:a},k),{},{components:n})):t.createElement(m,s({ref:a},k))}));function m(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var r=n.length,s=new Array(r);s[0]=f;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=n[p];return t.createElement.apply(null,s)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},3097:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var t=n(7462),o=(n(7294),n(3905));const r={},s="First Steps",l={unversionedId:"guides/Guide_02_First_Steps",id:"version-0.8.0/guides/Guide_02_First_Steps",title:"First Steps",description:"Creating a simple Kafka 
consumer app",source:"@site/versioned_docs/version-0.8.0/guides/Guide_02_First_Steps.md",sourceDirName:"guides",slug:"/guides/Guide_02_First_Steps",permalink:"/docs/guides/Guide_02_First_Steps",draft:!1,tags:[],version:"0.8.0",frontMatter:{}},i={},p=[{value:"Creating a simple Kafka consumer app",id:"creating-a-simple-kafka-consumer-app",level:2},{value:"Sending first message to your consumer",id:"sending-first-message-to-your-consumer",level:2},{value:"Creating a hello Kafka producer",id:"creating-a-hello-kafka-producer",level:2},{value:"Recap",id:"recap",level:2}],k={toc:p},c="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(c,(0,t.Z)({},k,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"first-steps"},"First Steps"),(0,o.kt)("h2",{id:"creating-a-simple-kafka-consumer-app"},"Creating a simple Kafka consumer app"),(0,o.kt)("p",null,"For our first demo we will create the simplest possible Kafka consumer\nand run it using \u2018fastkafka run\u2019 command."),(0,o.kt)("p",null,"The consumer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Connect to the Kafka Broker we setup in the Intro guide")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Listen to the hello topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Write any message received from the hello topic to stdout"))),(0,o.kt)("p",null,"To create the consumer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_consumer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass 
HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n print(f"Got data, msg={msg.msg}", flush=True)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,'!!! warning "Remember to flush"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n")),(0,o.kt)("p",null,"To run this consumer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka 
broker up and running on 127.0.0.1:9092\n[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. 
\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n")),(0,o.kt)("p",null,"Now you can interact with your consumer, by sending the messages to the\nsubscribed \u2018hello\u2019 topic, don\u2019t worry, we will cover this in the next\nstep of this guide."),(0,o.kt)("h2",{id:"sending-first-message-to-your-consumer"},"Sending first message to your consumer"),(0,o.kt)("p",null,"After we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly."),(0,o.kt)("p",null,"If you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic."),(0,o.kt)("p",null,"First, connect to your running kafka broker by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"docker run -it kafka /bin/bash\n")),(0,o.kt)("p",null,"Then, when connected to the container, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n")),(0,o.kt)("p",null,"This will open an interactive connection to the hello topic, now you can\nwrite your mesages to 
the topic and they will be consumed by our\nconsumer."),(0,o.kt)("p",null,"In the shell, type:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'{"msg":"hello"}\n')),(0,o.kt)("p",null,"and press enter. This will send a hello message to the topic which will\nbe read by our running consumer and outputed to stdout."),(0,o.kt)("p",null,"Check the output of your consumer (terminal where you ran the \u2018fastkafka\nrun\u2019 command) and confirm that your consumer has read the Kafka message.\nYou shoud see something like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"Got data, msg=hello\n")),(0,o.kt)("h2",{id:"creating-a-hello-kafka-producer"},"Creating a hello Kafka producer"),(0,o.kt)("p",null,"Consuming messages is only a part of this Library functionality, the\nother big part is producing the messages. So, let\u2019s create our first\nkafka producer which will send it\u2019s greetings to our consumer\nperiodically."),(0,o.kt)("p",null,"The producer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Connect to the Kafka Broker we setup in the Intro guide"),(0,o.kt)("li",{parentName:"ol"},"Connect to the hello topic"),(0,o.kt)("li",{parentName:"ol"},"Periodically send a message to the hello world topic")),(0,o.kt)("p",null,"To create the producer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_producer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str 
= Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n logger.info(f"Producing: {msg}")\n return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello(HelloKafkaMsg(msg="hello"))\n await asyncio.sleep(1)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,"To run this producer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[879272]: 
[INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n")),(0,o.kt)("p",null,"Now, while the producer is running, it will send a 
HelloKafkaMsg every\nsecond to the hello kafka topic. If your consumer is still running, you\nshould see the messages appear in its log."),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka consumer using FastKafka"),(0,o.kt)("li",{parentName:"ol"},"Sent a message to our consumer trough Kafka"),(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka producer using FastKafka")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/23c607c1.55560318.js b/assets/js/23c607c1.55560318.js new file mode 100644 index 0000000..ad03175 --- /dev/null +++ b/assets/js/23c607c1.55560318.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9840],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?o(Object(r),!0).forEach((function(t){a(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):o(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function i(e,t){if(null==e)return{};var r,n,a=function(e,t){if(null==e)return{};var r,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var l=n.createContext({}),p=function(e){var 
t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},c=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),f=p(r),k=a,m=f["".concat(l,".").concat(k)]||f[k]||u[k]||o;return r?n.createElement(m,s(s({ref:t},c),{},{components:r})):n.createElement(m,s({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[f]="string"==typeof e?e:a,s[1]=i;for(var p=2;p<o;p++)s[p]=r[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,r)}k.displayName="MDXCreateElement"},1025:(e,t,r)=>{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=r(7462),a=(r(7294),r(3905));const o={},s="run_fastkafka_server_process",i={unversionedId:"cli/run_fastkafka_server_process",id:"version-0.7.0/cli/run_fastkafka_server_process",title:"run_fastkafka_server_process",description:"Usage:",source:"@site/versioned_docs/version-0.7.0/cli/run_fastkafka_server_process.md",sourceDirName:"cli",slug:"/cli/run_fastkafka_server_process",permalink:"/docs/0.7.0/cli/run_fastkafka_server_process",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"fastkafka",permalink:"/docs/0.7.0/cli/fastkafka"},next:{title:"LICENSE",permalink:"/docs/0.7.0/LICENSE"}},l={},p=[],c={toc:p},f="wrapper";function 
u(e){let{components:t,...r}=e;return(0,a.kt)(f,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"run_fastkafka_server_process"},(0,a.kt)("inlineCode",{parentName:"h1"},"run_fastkafka_server_process")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Usage"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-console"},"$ run_fastkafka_server_process [OPTIONS] APP\n")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"APP"),": Input in the form of 'path:app', where ",(0,a.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,a.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,a.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Options"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. 
","[required]"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/243cddb9.83dc99e3.js b/assets/js/243cddb9.83dc99e3.js new file mode 100644 index 0000000..ac3eba5 --- /dev/null +++ b/assets/js/243cddb9.83dc99e3.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1294],{3905:(t,e,a)=>{a.d(e,{Zo:()=>k,kt:()=>m});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function i(t){for(var e=1;e<arguments.length;e++){var a=null!=arguments[e]?arguments[e]:{};e%2?l(Object(a),!0).forEach((function(e){r(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):l(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function o(t,e){if(null==t)return{};var a,n,r=function(t,e){if(null==t)return{};var a,n,r={},l=Object.keys(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var d=n.createContext({}),p=function(t){var e=n.useContext(d),a=e;return 
t&&(a="function"==typeof t?t(e):i(i({},e),t)),a},k=function(t){var e=p(t.components);return n.createElement(d.Provider,{value:e},t.children)},s="mdxType",c={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},u=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,d=t.parentName,k=o(t,["components","mdxType","originalType","parentName"]),s=p(a),u=r,m=s["".concat(d,".").concat(u)]||s[u]||c[u]||l;return a?n.createElement(m,i(i({ref:e},k),{},{components:a})):n.createElement(m,i({ref:e},k))}));function m(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,i=new Array(l);i[0]=u;var o={};for(var d in e)hasOwnProperty.call(e,d)&&(o[d]=e[d]);o.originalType=t,o[s]="string"==typeof t?t:r,i[1]=o;for(var p=2;p<l;p++)i[p]=a[p];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},5043:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>d,contentTitle:()=>i,default:()=>c,frontMatter:()=>l,metadata:()=>o,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const l={},i=void 0,o={unversionedId:"api/fastkafka/testing/LocalRedpandaBroker",id:"version-0.8.0/api/fastkafka/testing/LocalRedpandaBroker",title:"LocalRedpandaBroker",description:"fastkafka.testing.LocalRedpandaBroker 
{fastkafka.testing.LocalRedpandaBroker}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/testing/LocalRedpandaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/LocalRedpandaBroker",permalink:"/docs/api/fastkafka/testing/LocalRedpandaBroker",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"ApacheKafkaBroker",permalink:"/docs/api/fastkafka/testing/ApacheKafkaBroker"},next:{title:"Tester",permalink:"/docs/api/fastkafka/testing/Tester"}},d={},p=[{value:"fastkafka.testing.LocalRedpandaBroker",id:"fastkafka.testing.LocalRedpandaBroker",level:2},{value:"<strong>init</strong>",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.init",level:3},{value:"get_service_config_string",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.get_service_config_string",level:3},{value:"is_started",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.is_started",level:3},{value:"start",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.start",level:3},{value:"stop",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.stop",level:3}],k={toc:p},s="wrapper";function c(t){let{components:e,...a}=t;return(0,r.kt)(s,(0,n.Z)({},k,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.testing.LocalRedpandaBroker"},"fastkafka.testing.LocalRedpandaBroker"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L84-L200",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing."),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L88-L120",class:"link-to-source",target:"_blank"},"View 
source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self,\n topics=[],\n retries=3,\n apply_nest_asyncio=False,\n listener_port=9092,\n tag='v23.1.2',\n seastar_core=1,\n memory='1G',\n mode='dev-container',\n default_log_level='debug',\n kwargs,\n)\n")),(0,r.kt)("p",null,"Initialises the LocalRedpandaBroker object"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topics")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Iterable[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"List of topics to create after sucessfull redpanda broker startup"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"[]"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retries")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Number of retries to create redpanda service"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"3"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"apply_nest_asyncio")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"set to True if running in 
notebook"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"listener_port")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Port on which the clients (producers and consumers) can connect"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"9092"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"tag")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"Tag of Redpanda image to use to start container"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'v23.1.2'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"seastar_core")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"memory")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The amount of memory to make available to 
Redpanda"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'1G'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"mode")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"Mode to use to load configuration properties in container"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'dev-container'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"default_log_level")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"Log levels to use for Redpanda"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'debug'"))))),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.get_service_config_string"},"get_service_config_string"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L168-L174",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"get_service_config_string(\n self, service, data_dir\n)\n")),(0,r.kt)("p",null,"Generates a configuration for a 
service"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"data_dir")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Path")),(0,r.kt)("td",{parentName:"tr",align:null},"Path to the directory where the zookeepeer instance will save data"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"service")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'"redpanda", defines which service to get config string for'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.is_started"},"is_started"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L123-L133",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"@property\nis_started(\n self\n)\n")),(0,r.kt)("p",null,"Property indicating whether the LocalRedpandaBroker object is started."),(0,r.kt)("p",null,"The is_started property indicates if the LocalRedpandaBroker object is currently\nin a started state. 
This implies that Redpanda docker container has sucesfully\nstarted and is ready for handling events."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"True if the object is started, False otherwise.")))),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.start"},"start"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L333-L372",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"start(\n self\n)\n")),(0,r.kt)("p",null,"Starts a local redpanda broker instance synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"Redpanda broker bootstrap server address in string format: add:port")))),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.stop"},"stop"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/local_redpanda_broker.py#L376-L388",class:"link-to-source",target:"_blank"},"View 
source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"stop(\n self\n)\n")),(0,r.kt)("p",null,"Stops a local redpanda broker instance synchronously"))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/2622e95a.0b16f714.js b/assets/js/2622e95a.0b16f714.js new file mode 100644 index 0000000..53dd4cb --- /dev/null +++ b/assets/js/2622e95a.0b16f714.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5340],{3905:(t,e,a)=>{a.d(e,{Zo:()=>s,kt:()=>u});var r=a(7294);function n(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,r)}return a}function k(t){for(var e=1;e<arguments.length;e++){var a=null!=arguments[e]?arguments[e]:{};e%2?l(Object(a),!0).forEach((function(e){n(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):l(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function i(t,e){if(null==t)return{};var a,r,n=function(t,e){if(null==t)return{};var a,r,n={},l=Object.keys(t);for(r=0;r<l.length;r++)a=l[r],e.indexOf(a)>=0||(n[a]=t[a]);return n}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(r=0;r<l.length;r++)a=l[r],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(n[a]=t[a])}return n}var o=r.createContext({}),p=function(t){var e=r.useContext(o),a=e;return t&&(a="function"==typeof t?t(e):k(k({},e),t)),a},s=function(t){var e=p(t.components);return r.createElement(o.Provider,{value:e},t.children)},c="mdxType",f={inlineCode:"code",wrapper:function(t){var e=t.children;return r.createElement(r.Fragment,{},e)}},d=r.forwardRef((function(t,e){var 
a=t.components,n=t.mdxType,l=t.originalType,o=t.parentName,s=i(t,["components","mdxType","originalType","parentName"]),c=p(a),d=n,u=c["".concat(o,".").concat(d)]||c[d]||f[d]||l;return a?r.createElement(u,k(k({ref:e},s),{},{components:a})):r.createElement(u,k({ref:e},s))}));function u(t,e){var a=arguments,n=e&&e.mdxType;if("string"==typeof t||n){var l=a.length,k=new Array(l);k[0]=d;var i={};for(var o in e)hasOwnProperty.call(e,o)&&(i[o]=e[o]);i.originalType=t,i[c]="string"==typeof t?t:n,k[1]=i;for(var p=2;p<l;p++)k[p]=a[p];return r.createElement.apply(null,k)}return r.createElement.apply(null,a)}d.displayName="MDXCreateElement"},7743:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>o,contentTitle:()=>k,default:()=>f,frontMatter:()=>l,metadata:()=>i,toc:()=>p});var r=a(7462),n=(a(7294),a(3905));const l={},k=void 0,i={unversionedId:"api/fastkafka/testing/ApacheKafkaBroker",id:"version-0.8.0/api/fastkafka/testing/ApacheKafkaBroker",title:"ApacheKafkaBroker",description:"fastkafka.testing.ApacheKafkaBroker 
{fastkafka.testing.ApacheKafkaBroker}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/testing/ApacheKafkaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/ApacheKafkaBroker",permalink:"/docs/api/fastkafka/testing/ApacheKafkaBroker",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"SequentialExecutor",permalink:"/docs/api/fastkafka/executors/SequentialExecutor"},next:{title:"LocalRedpandaBroker",permalink:"/docs/api/fastkafka/testing/LocalRedpandaBroker"}},o={},p=[{value:"fastkafka.testing.ApacheKafkaBroker",id:"fastkafka.testing.ApacheKafkaBroker",level:2},{value:"<strong>init</strong>",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.init",level:3},{value:"get_service_config_string",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.get_service_config_string",level:3},{value:"is_started",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.is_started",level:3},{value:"start",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.start",level:3},{value:"stop",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.stop",level:3}],s={toc:p},c="wrapper";function f(t){let{components:e,...a}=t;return(0,n.kt)(c,(0,r.Z)({},s,a,{components:e,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.testing.ApacheKafkaBroker"},"fastkafka.testing.ApacheKafkaBroker"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L168-L305",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("p",null,"ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing."),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.init"},(0,n.kt)("strong",{parentName:"h3"},"init")),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L173-L209",class:"link-to-source",target:"_blank"},"View 
source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self,\n topics=[],\n retries=3,\n apply_nest_asyncio=False,\n zookeeper_port=2181,\n listener_port=9092,\n)\n")),(0,n.kt)("p",null,"Initialises the ApacheKafkaBroker object"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Name"),(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"),(0,n.kt)("th",{parentName:"tr",align:null},"Default"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"topics")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"Iterable[str]")),(0,n.kt)("td",{parentName:"tr",align:null},"List of topics to create after sucessfull Kafka broker startup"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"[]"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"retries")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"int")),(0,n.kt)("td",{parentName:"tr",align:null},"Number of retries to create kafka and zookeeper services using random"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"3"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"apply_nest_asyncio")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"bool")),(0,n.kt)("td",{parentName:"tr",align:null},"set to True if running in 
notebook"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"False"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"zookeeper_port")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"int")),(0,n.kt)("td",{parentName:"tr",align:null},"Port for clients (Kafka brokes) to connect"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"2181"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"listener_port")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"int")),(0,n.kt)("td",{parentName:"tr",align:null},"Port on which the clients (producers and consumers) can connect"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"9092"))))),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.get_service_config_string"},"get_service_config_string"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L459-L475",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"get_service_config_string(\n self, service, data_dir\n)\n")),(0,n.kt)("p",null,"Gets the configuration string for a 
service."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Name"),(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"),(0,n.kt)("th",{parentName:"tr",align:null},"Default"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"service")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"str")),(0,n.kt)("td",{parentName:"tr",align:null},'Name of the service ("kafka" or "zookeeper").'),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"data_dir")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"Path")),(0,n.kt)("td",{parentName:"tr",align:null},"Path to the directory where the service will save data."),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))))),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"str")),(0,n.kt)("td",{parentName:"tr",align:null},"The service configuration 
string.")))),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.is_started"},"is_started"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L212-L222",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"@property\nis_started(\n self\n)\n")),(0,n.kt)("p",null,"Property indicating whether the ApacheKafkaBroker object is started."),(0,n.kt)("p",null,"The is_started property indicates if the ApacheKafkaBroker object is currently\nin a started state. This implies that Zookeeper and Kafka broker processes have\nsucesfully started and are ready for handling events."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"bool")),(0,n.kt)("td",{parentName:"tr",align:null},"True if the object is started, False otherwise.")))),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.start"},"start"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L624-L664",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"start(\n self\n)\n")),(0,n.kt)("p",null,"Starts a local Kafka broker and ZooKeeper instance 
synchronously."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"str")),(0,n.kt)("td",{parentName:"tr",align:null},"The Kafka broker bootstrap server address in string format: host:port.")))),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.stop"},"stop"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_testing/apache_kafka_broker.py#L668-L680",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"stop(\n self\n)\n")),(0,n.kt)("p",null,"Stops a local kafka broker and zookeeper instance synchronously"))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/29105828.3c1f0326.js b/assets/js/29105828.3c1f0326.js new file mode 100644 index 0000000..bc9bc1c --- /dev/null +++ b/assets/js/29105828.3c1f0326.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9050],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>k});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function p(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),l=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=l(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,s=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),c=l(a),d=o,k=c["".concat(s,".").concat(d)]||c[d]||f[d]||r;return a?n.createElement(k,i(i({ref:t},u),{},{components:a})):n.createElement(k,i({ref:t},u))}));function k(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=d;var p={};for(var s in t)hasOwnProperty.call(t,s)&&(p[s]=t[s]);p.originalType=e,p[c]="string"==typeof e?e:o,i[1]=p;for(var l=2;l<r;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}d.displayName="MDXCreateElement"},8367:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>f,frontMatter:()=>r,metadata:()=>p,toc:()=>l});var n=a(7462),o=(a(7294),a(3905));const r={},i="Deploy FastKafka docs to GitHub 
Pages",p={unversionedId:"guides/Guide_04_Github_Actions_Workflow",id:"version-0.7.0/guides/Guide_04_Github_Actions_Workflow",title:"Deploy FastKafka docs to GitHub Pages",description:"Getting started",source:"@site/versioned_docs/version-0.7.0/guides/Guide_04_Github_Actions_Workflow.md",sourceDirName:"guides",slug:"/guides/Guide_04_Github_Actions_Workflow",permalink:"/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using Redpanda to test FastKafka",permalink:"/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka"},next:{title:"Deploying FastKafka using Docker",permalink:"/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka"}},s={},l=[{value:"Getting started",id:"getting-started",level:2},{value:"Options",id:"options",level:2},{value:"Set app location",id:"set-app-location",level:3},{value:"Example Repository",id:"example-repository",level:2}],u={toc:l},c="wrapper";function f(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"deploy-fastkafka-docs-to-github-pages"},"Deploy FastKafka docs to GitHub Pages"),(0,o.kt)("h2",{id:"getting-started"},"Getting started"),(0,o.kt)("p",null,"Add your workflow file ",(0,o.kt)("inlineCode",{parentName:"p"},".github/workflows/fastkafka_docs_deploy.yml")," and\npush it to your remote default branch."),(0,o.kt)("p",null,"Here is an example workflow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'name: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n push:\n branches: [ "main", "master" ]\n workflow_dispatch:\n\njobs:\n deploy:\n runs-on: ubuntu-latest\n permissions:\n contents: write\n steps:\n - uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("h2",{id:"options"},"Options"),(0,o.kt)("h3",{id:"set-app-location"},"Set app 
location"),(0,o.kt)("p",null,"Input in the form of ",(0,o.kt)("inlineCode",{parentName:"p"},"path:app"),", where ",(0,o.kt)("inlineCode",{parentName:"p"},"path")," is the path to a Python\nfile and ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," is an object of type ",(0,o.kt)("inlineCode",{parentName:"p"},"FastKafka"),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'- name: Deploy\n uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("p",null,"In the above example, ",(0,o.kt)("inlineCode",{parentName:"p"},"FastKafka")," app is named as ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_app")," and it is\navailable in the ",(0,o.kt)("inlineCode",{parentName:"p"},"application")," submodule of the ",(0,o.kt)("inlineCode",{parentName:"p"},"test_fastkafka")," module."),(0,o.kt)("h2",{id:"example-repository"},"Example Repository"),(0,o.kt)("p",null,"A ",(0,o.kt)("inlineCode",{parentName:"p"},"FastKafka"),"-based library that uses the above-mentioned workfow\nactions to publish FastKafka docs to ",(0,o.kt)("inlineCode",{parentName:"p"},"Github Pages")," can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml"},"here"),"."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/2ae68e65.2da2affc.js b/assets/js/2ae68e65.2da2affc.js new file mode 100644 index 0000000..9a48e9a --- /dev/null +++ b/assets/js/2ae68e65.2da2affc.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[88],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>k});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function r(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){i(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function l(e,t){if(null==e)return{};var n,a,i=function(e,t){if(null==e)return{};var n,a,i={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var s=a.createContext({}),m=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},d=function(e){var t=m(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=m(n),u=i,k=p["".concat(s,".").concat(u)]||p[u]||c[u]||o;return n?a.createElement(k,r(r({ref:t},d),{},{components:n})):a.createElement(k,r({ref:t},d))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,r=new Array(o);r[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:i,r[1]=l;for(var m=2;m<o;m++)r[m]=n[m];return a.createElement.apply(null,r)}return a.createElement.apply(null,n)}u.displayName="MDXCreateElement"},7193:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>m});var a=n(7462),i=(n(7294),n(3905));const o={},r=void 
0,l={unversionedId:"api/fastkafka/FastKafka",id:"version-0.5.0/api/fastkafka/FastKafka",title:"FastKafka",description:"fastkafka.FastKafka {fastkafka.FastKafka}",source:"@site/versioned_docs/version-0.5.0/api/fastkafka/FastKafka.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/",permalink:"/docs/0.5.0/api/fastkafka/",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Benchmarking FastKafka app",permalink:"/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka"},next:{title:"KafkaEvent",permalink:"/docs/0.5.0/api/fastkafka/KafkaEvent"}},s={},m=[{value:"<code>fastkafka.FastKafka</code>",id:"fastkafka.FastKafka",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>benchmark</code>",id:"benchmark",level:3},{value:"<code>consumes</code>",id:"consumes",level:3},{value:"<code>create_mocks</code>",id:"create_mocks",level:3},{value:"<code>produces</code>",id:"produces",level:3},{value:"<code>run_in_background</code>",id:"run_in_background",level:3}],d={toc:m},p="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(p,(0,a.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"h2"},"fastkafka.FastKafka")),(0,i.kt)("h3",{id:"init"},(0,i.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Dict[str, Any], root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AbstractAsyncContextManager[NoneType]]] = None, loop=None, client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x101ca6040>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, 
partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x101c80310>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None")),(0,i.kt)("p",null,"Creates FastKafka application"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"title"),": optional title for the documentation. If None,\nthe title will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": optional description for the documentation. If\nNone, the description will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"version"),": optional version for the documentation. If None,\nthe version will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"contact"),": optional contact for the documentation. 
If None, the\ncontact will be set to placeholder values:\nname='Author' url=HttpUrl(' ",(0,i.kt)("a",{parentName:"li",href:"https://www.google.com"},"https://www.google.com")," ', ) email='",(0,i.kt)("a",{parentName:"li",href:"mailto:noreply@gmail.com"},"noreply@gmail.com"),"'"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": dictionary describing kafka brokers used for\ngenerating documentation"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"root_path"),": path to where documentation will be created"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"lifespan"),": asynccontextmanager that is used for setting lifespan hooks.\n",(0,i.kt)("strong",{parentName:"li"},"aenter")," is called before app start and ",(0,i.kt)("strong",{parentName:"li"},"aexit")," after app stop.\nThe lifespan is called whe application is started as async context\nmanager, e.g.:",(0,i.kt)("inlineCode",{parentName:"li"},"async with kafka_app...")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. 
No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). 
Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. (See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. 
The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. 
This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. 
Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. 
If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("h3",{id:"benchmark"},(0,i.kt)("inlineCode",{parentName:"h3"},"benchmark")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Union[~O, NoneType]]], typing.Callable[[~I], typing.Union[~O, NoneType]]]")),(0,i.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"interval"),": Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sliding_window_size"),": The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated")),(0,i.kt)("h3",{id:"consumes"},(0,i.kt)("inlineCode",{parentName:"h3"},"consumes")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]], typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and 
documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"decoder"),": Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: "on_". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. 
If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string (or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings) that the consumer should contact to bootstrap\ninitial cluster metadata.")),(0,i.kt)("p",null,"This does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ",(0,i.kt)("inlineCode",{parentName:"p"},"localhost:9092"),"."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~.consumer.group_coordinator.GroupCoordinator"),"\nfor logging with respect to consumer group administration. Default:\n",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-{version}")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. 
The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Client request timeout in milliseconds.\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. 
Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. 
It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more information see\n:ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),". Default: None."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying ",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. 
Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Valid values are:\n",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("h3",{id:"create_mocks"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_mocks")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_mocks(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,i.kt)("h3",{id:"produces"},(0,i.kt)("inlineCode",{parentName:"h3"},"produces")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x101ca6040>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x101c80310>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]], typing.Union[typing.Callable[..., 
typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when delivery report for a produced message is received"),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encoder"),": Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. It also accepts custom encoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: "to_". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. 
It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. 
The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. 
Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. 
Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. 
Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. 
If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. 
(See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"))),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": when needed")),(0,i.kt)("h3",{id:"run_in_background"},(0,i.kt)("inlineCode",{parentName:"h3"},"run_in_background")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]")),(0,i.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,i.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,i.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is triggered."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A decorator function that takes a background task as an input and stores it to be run in the backround.")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/2afa602b.f892f0f7.js b/assets/js/2afa602b.f892f0f7.js new file mode 100644 index 0000000..4ee7d96 --- /dev/null +++ b/assets/js/2afa602b.f892f0f7.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8653],{3905:(e,t,a)=>{a.d(t,{Zo:()=>l,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var 
n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function f(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),c=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},l=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,l=f(e,["components","mdxType","originalType","parentName"]),p=c(a),u=r,d=p["".concat(s,".").concat(u)]||p[u]||k[u]||o;return a?n.createElement(d,i(i({ref:t},l),{},{components:a})):n.createElement(d,i({ref:t},l))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=u;var f={};for(var s in t)hasOwnProperty.call(t,s)&&(f[s]=t[s]);f.originalType=e,f[p]="string"==typeof e?e:r,i[1]=f;for(var c=2;c<o;c++)i[c]=a[c];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},4721:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>k,frontMatter:()=>o,metadata:()=>f,toc:()=>c});var 
n=a(7462),r=(a(7294),a(3905));const o={},i=void 0,f={unversionedId:"api/fastkafka/KafkaEvent",id:"version-0.5.0/api/fastkafka/KafkaEvent",title:"KafkaEvent",description:"fastkafka.KafkaEvent {fastkafka.KafkaEvent}",source:"@site/versioned_docs/version-0.5.0/api/fastkafka/KafkaEvent.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/KafkaEvent",permalink:"/docs/0.5.0/api/fastkafka/KafkaEvent",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/0.5.0/api/fastkafka/"},next:{title:"ApacheKafkaBroker",permalink:"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker"}},s={},c=[{value:"<code>fastkafka.KafkaEvent</code>",id:"fastkafka.KafkaEvent",level:2}],l={toc:c},p="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},l,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.KafkaEvent")),(0,r.kt)("p",null,"A generic class for representing Kafka events. 
Based on BaseSubmodel, bound to pydantic.BaseModel"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"message"),": The message contained in the Kafka event, can be of type pydantic.BaseModel."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"key"),": The optional key used to identify the Kafka event.")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/2b2faa0a.bb2930b4.js b/assets/js/2b2faa0a.bb2930b4.js new file mode 100644 index 0000000..5807f6f --- /dev/null +++ b/assets/js/2b2faa0a.bb2930b4.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5050],{3905:(e,r,t)=>{t.d(r,{Zo:()=>s,kt:()=>k});var a=t(7294);function n(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function o(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);r&&(a=a.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,a)}return t}function c(e){for(var r=1;r<arguments.length;r++){var t=null!=arguments[r]?arguments[r]:{};r%2?o(Object(t),!0).forEach((function(r){n(e,r,t[r])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):o(Object(t)).forEach((function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r))}))}return e}function d(e,r){if(null==e)return{};var t,a,n=function(e,r){if(null==e)return{};var t,a,n={},o=Object.keys(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||(n[t]=e[t]);return n}(e,r);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var i=a.createContext({}),l=function(e){var r=a.useContext(i),t=r;return e&&(t="function"==typeof 
e?e(r):c(c({},r),e)),t},s=function(e){var r=l(e.components);return a.createElement(i.Provider,{value:r},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var r=e.children;return a.createElement(a.Fragment,{},r)}},u=a.forwardRef((function(e,r){var t=e.components,n=e.mdxType,o=e.originalType,i=e.parentName,s=d(e,["components","mdxType","originalType","parentName"]),p=l(t),u=n,k=p["".concat(i,".").concat(u)]||p[u]||f[u]||o;return t?a.createElement(k,c(c({ref:r},s),{},{components:t})):a.createElement(k,c({ref:r},s))}));function k(e,r){var t=arguments,n=r&&r.mdxType;if("string"==typeof e||n){var o=t.length,c=new Array(o);c[0]=u;var d={};for(var i in r)hasOwnProperty.call(r,i)&&(d[i]=r[i]);d.originalType=e,d[p]="string"==typeof e?e:n,c[1]=d;for(var l=2;l<o;l++)c[l]=t[l];return a.createElement.apply(null,c)}return a.createElement.apply(null,t)}u.displayName="MDXCreateElement"},2490:(e,r,t)=>{t.r(r),t.d(r,{assets:()=>i,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>d,toc:()=>l});var a=t(7462),n=(t(7294),t(3905));const o={},c=void 0,d={unversionedId:"api/fastkafka/encoder/avro_decoder",id:"version-0.6.0/api/fastkafka/encoder/avro_decoder",title:"avro_decoder",description:"fastkafka.encoder.avrodecoder {fastkafka.encoder.avrodecoder}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/avro_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_decoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/avro_decoder",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"AvroBase",permalink:"/docs/0.6.0/api/fastkafka/encoder/AvroBase"},next:{title:"avro_encoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/avro_encoder"}},i={},l=[{value:"<code>fastkafka.encoder.avro_decoder</code>",id:"fastkafka.encoder.avro_decoder",level:2},{value:"<code>avro_decoder</code>",id:"avro_decoder",level:3}],s={toc:l},p="wrapper";function 
f(e){let{components:r,...t}=e;return(0,n.kt)(p,(0,a.Z)({},s,t,{components:r,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.encoder.avro_decoder"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avro_decoder")),(0,n.kt)("h3",{id:"avro_decoder"},(0,n.kt)("inlineCode",{parentName:"h3"},"avro_decoder")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def avro_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any")),(0,n.kt)("p",null,"Decoder to decode avro encoded messages to pydantic model instance"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"raw_msg"),": Avro encoded bytes message received from Kafka topic"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"cls"),": Pydantic class; This pydantic class will be used to construct instance of same class")),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"An instance of given pydantic class")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/2bc15a09.b35b76c3.js b/assets/js/2bc15a09.b35b76c3.js new file mode 100644 index 0000000..dc10933 --- /dev/null +++ b/assets/js/2bc15a09.b35b76c3.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8908],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},k="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),k=l(a),d=r,f=k["".concat(p,".").concat(d)]||k[d]||c[d]||o;return a?n.createElement(f,i(i({ref:t},u),{},{components:a})):n.createElement(f,i({ref:t},u))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=d;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[k]="string"==typeof e?e:r,i[1]=s;for(var l=2;l<o;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}d.displayName="MDXCreateElement"},8690:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>c,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var n=a(7462),r=(a(7294),a(3905));const o={},i="Defining a partition key",s={unversionedId:"guides/Guide_22_Partition_Keys",id:"version-0.7.1/guides/Guide_22_Partition_Keys",title:"Defining a partition 
key",description:"Partition keys are used in Apache Kafka to determine which partition a",source:"@site/versioned_docs/version-0.7.1/guides/Guide_22_Partition_Keys.md",sourceDirName:"guides",slug:"/guides/Guide_22_Partition_Keys",permalink:"/docs/0.7.1/guides/Guide_22_Partition_Keys",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@produces basics",permalink:"/docs/0.7.1/guides/Guide_21_Produces_Basics"},next:{title:"Batch producing",permalink:"/docs/0.7.1/guides/Guide_23_Batch_Producing"}},p={},l=[{value:"Return a key from the producing function",id:"return-a-key-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic with the desired key",id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key",level:2}],u={toc:l},k="wrapper";function c(e){let{components:t,...a}=e;return(0,r.kt)(k,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"defining-a-partition-key"},"Defining a partition key"),(0,r.kt)("p",null,"Partition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. 
Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance."),(0,r.kt)("p",null,"You can define your partition keys when using the ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator,\nthis guide will demonstrate to you this feature."),(0,r.kt)("h2",{id:"return-a-key-from-the-producing-function"},"Return a key from the producing function"),(0,r.kt)("p",null,"To define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass and set the key value. Check the example below:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n')),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class with\nthe key defined as ",(0,r.kt)("em",{parentName:"p"},"my_key"),". So, we wrap the message and key into a\nKafkaEvent class and return it as such."),(0,r.kt)("p",null,"While generating the documentation, the\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass will be unwrapped and the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class will be documented in\nthe definition of message type, same way if you didn\u2019t use the key."),(0,r.kt)("p",null,'!!! 
info "Which key to choose?"'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n")),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("strong",{parentName:"p"},"@producer basics")," guide to return\nthe ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," with our key. The final app will look like this (make\nsure you replace the ",(0,r.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key"},"Check if the message was sent to the Kafka topic with the desired key"),(0,r.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic with the defined key. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the ",(0,r.kt)("em",{parentName:"p"},'my_key {\u201cmsg": \u201cHello world!"}')," messages in your\ntopic appearing, the ",(0,r.kt)("em",{parentName:"p"},"my_key")," part of the message is the key that we\ndefined in our producing function."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/2c797d78.264327d1.js b/assets/js/2c797d78.264327d1.js new file mode 100644 index 0000000..5c54c84 --- /dev/null +++ b/assets/js/2c797d78.264327d1.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7011],{3905:(e,t,n)=>{n.d(t,{Zo:()=>l,kt:()=>m});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?a(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function u(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var 
c=r.createContext({}),s=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},l=function(e){var t=s(e.components);return r.createElement(c.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,l=u(e,["components","mdxType","originalType","parentName"]),p=s(n),d=o,m=p["".concat(c,".").concat(d)]||p[d]||f[d]||a;return n?r.createElement(m,i(i({ref:t},l),{},{components:n})):r.createElement(m,i({ref:t},l))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var u={};for(var c in t)hasOwnProperty.call(t,c)&&(u[c]=t[c]);u.originalType=e,u[p]="string"==typeof e?e:o,i[1]=u;for(var s=2;s<a;s++)i[s]=n[s];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}d.displayName="MDXCreateElement"},6128:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>u,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={},i="Authentication",u={unversionedId:"guides/Guide_03_Authentication",id:"version-0.6.0/guides/Guide_03_Authentication",title:"Authentication",description:"TLS Authentication",source:"@site/versioned_docs/version-0.6.0/guides/Guide_03_Authentication.md",sourceDirName:"guides",slug:"/guides/Guide_03_Authentication",permalink:"/docs/0.6.0/guides/Guide_03_Authentication",draft:!1,tags:[],version:"0.6.0",frontMatter:{}},c={},s=[{value:"TLS Authentication",id:"tls-authentication",level:2}],l={toc:s},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},l,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"authentication"},"Authentication"),(0,o.kt)("h2",{id:"tls-authentication"},"TLS Authentication"),(0,o.kt)("p",null,"sasl_mechanism (str) \u2013 Authentication mechanism when security_protocol\nis 
configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN"),(0,o.kt)("p",null,"sasl_plain_username (str) \u2013 username for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_plain_password (str) \u2013 password for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_oauth_token_provider (AbstractTokenProvider) \u2013 OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). Default: None"))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/2e96a196.47765f9b.js b/assets/js/2e96a196.47765f9b.js new file mode 100644 index 0000000..ead832d --- /dev/null +++ b/assets/js/2e96a196.47765f9b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3623],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function l(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?l(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):l(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},l=Object.keys(e);for(n=0;n<l.length;n++)a=l[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n<l.length;n++)a=l[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var 
s=n.createContext({}),u=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,l=e.originalType,s=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),p=u(a),m=r,f=p["".concat(s,".").concat(m)]||p[m]||k[m]||l;return a?n.createElement(f,o(o({ref:t},c),{},{components:a})):n.createElement(f,o({ref:t},c))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=a.length,o=new Array(l);o[0]=m;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[p]="string"==typeof e?e:r,o[1]=i;for(var u=2;u<l;u++)o[u]=a[u];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},5413:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>k,frontMatter:()=>l,metadata:()=>i,toc:()=>u});var n=a(7462),r=(a(7294),a(3905));const l={},o=void 0,i={unversionedId:"api/fastkafka/executors/SequentialExecutor",id:"version-0.8.0/api/fastkafka/executors/SequentialExecutor",title:"SequentialExecutor",description:"fastkafka.executors.SequentialExecutor 
{fastkafka.executors.SequentialExecutor}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/executors/SequentialExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/SequentialExecutor",permalink:"/docs/api/fastkafka/executors/SequentialExecutor",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"DynamicTaskExecutor",permalink:"/docs/api/fastkafka/executors/DynamicTaskExecutor"},next:{title:"ApacheKafkaBroker",permalink:"/docs/api/fastkafka/testing/ApacheKafkaBroker"}},s={},u=[{value:"fastkafka.executors.SequentialExecutor",id:"fastkafka.executors.SequentialExecutor",level:2},{value:"<strong>init</strong>",id:"fastkafka._components.task_streaming.SequentialExecutor.init",level:3},{value:"run",id:"fastkafka._components.task_streaming.SequentialExecutor.run",level:3}],c={toc:u},p="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.executors.SequentialExecutor"},"fastkafka.executors.SequentialExecutor"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L305-L356",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"A class that implements a sequential executor for processing consumer records."),(0,r.kt)("p",null,"The SequentialExecutor class extends the StreamExecutor class and provides functionality\nfor running processing tasks in sequence by awaiting their coroutines."),(0,r.kt)("h3",{id:"fastkafka._components.task_streaming.SequentialExecutor.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L312-L326",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self, throw_exceptions=False, 
max_buffer_size=100000\n)\n")),(0,r.kt)("p",null,"Create an instance of SequentialExecutor"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"throw_exceptions")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"Flag indicating whether exceptions should be thrown or logged.Defaults to False."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_buffer_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum buffer size for the memory object stream.Defaults to 100_000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100000"))))),(0,r.kt)("h3",{id:"fastkafka._components.task_streaming.SequentialExecutor.run"},"run"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/task_streaming.py#L328-L356",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"run(\n self, is_shutting_down_f, generator, processor\n)\n")),(0,r.kt)("p",null,"Runs the sequential 
executor."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"is_shutting_down_f")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[], bool]")),(0,r.kt)("td",{parentName:"tr",align:null},"Function to check if the executor is shutting down."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"generator")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Generator function for retrieving consumer records."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"processor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Processor function for processing consumer records."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/2fe15297.5d5bccf7.js b/assets/js/2fe15297.5d5bccf7.js new file mode 100644 index 0000000..660ce65 --- /dev/null +++ 
b/assets/js/2fe15297.5d5bccf7.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2111],{1388:a=>{a.exports=JSON.parse('{"pluginId":"default","version":"0.7.0","label":"0.7.0","banner":"unmaintained","badge":true,"noIndex":false,"className":"docs-version-0.7.0","isLast":false,"docsSidebars":{"tutorialSidebar":[{"type":"link","label":"FastKafka","href":"/docs/0.7.0/","docId":"index"},{"type":"category","label":"Guides","items":[{"type":"category","label":"Writing services","items":[{"type":"link","label":"@consumes basics","href":"/docs/0.7.0/guides/Guide_11_Consumes_Basics","docId":"guides/Guide_11_Consumes_Basics"},{"type":"link","label":"Batch consuming","href":"/docs/0.7.0/guides/Guide_12_Batch_Consuming","docId":"guides/Guide_12_Batch_Consuming"},{"type":"link","label":"@produces basics","href":"/docs/0.7.0/guides/Guide_21_Produces_Basics","docId":"guides/Guide_21_Produces_Basics"},{"type":"link","label":"Defining a partition key","href":"/docs/0.7.0/guides/Guide_22_Partition_Keys","docId":"guides/Guide_22_Partition_Keys"},{"type":"link","label":"Batch producing","href":"/docs/0.7.0/guides/Guide_23_Batch_Producing","docId":"guides/Guide_23_Batch_Producing"},{"type":"link","label":"Lifespan Events","href":"/docs/0.7.0/guides/Guide_05_Lifespan_Handler","docId":"guides/Guide_05_Lifespan_Handler"},{"type":"link","label":"Encoding and Decoding Kafka Messages with FastKafka","href":"/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","docId":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},{"type":"link","label":"Using multiple Kafka clusters","href":"/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters","docId":"guides/Guide_24_Using_Multiple_Kafka_Clusters"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Testing","items":[{"type":"link","label":"Using Redpanda to test 
FastKafka","href":"/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka","docId":"guides/Guide_31_Using_redpanda_to_test_fastkafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Documentation generation","items":[{"type":"link","label":"Deploy FastKafka docs to GitHub Pages","href":"/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow","docId":"guides/Guide_04_Github_Actions_Workflow"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Deployment","items":[{"type":"link","label":"Deploying FastKafka using Docker","href":"/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka","docId":"guides/Guide_30_Using_docker_to_deploy_fastkafka"},{"type":"link","label":"Using FastAPI to Run FastKafka Application","href":"/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application","docId":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Benchmarking","items":[{"type":"link","label":"Benchmarking FastKafka 
app","href":"/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka","docId":"guides/Guide_06_Benchmarking_FastKafka"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"API","items":[{"type":"link","label":"EventMetadata","href":"/docs/0.7.0/api/fastkafka/EventMetadata","docId":"api/fastkafka/EventMetadata"},{"type":"link","label":"FastKafka","href":"/docs/0.7.0/api/fastkafka/","docId":"api/fastkafka/FastKafka"},{"type":"link","label":"KafkaEvent","href":"/docs/0.7.0/api/fastkafka/KafkaEvent","docId":"api/fastkafka/KafkaEvent"},{"type":"category","label":"encoder","items":[{"type":"link","label":"AvroBase","href":"/docs/0.7.0/api/fastkafka/encoder/AvroBase","docId":"api/fastkafka/encoder/AvroBase"},{"type":"link","label":"avro_decoder","href":"/docs/0.7.0/api/fastkafka/encoder/avro_decoder","docId":"api/fastkafka/encoder/avro_decoder"},{"type":"link","label":"avro_encoder","href":"/docs/0.7.0/api/fastkafka/encoder/avro_encoder","docId":"api/fastkafka/encoder/avro_encoder"},{"type":"link","label":"avsc_to_pydantic","href":"/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic","docId":"api/fastkafka/encoder/avsc_to_pydantic"},{"type":"link","label":"json_decoder","href":"/docs/0.7.0/api/fastkafka/encoder/json_decoder","docId":"api/fastkafka/encoder/json_decoder"},{"type":"link","label":"json_encoder","href":"/docs/0.7.0/api/fastkafka/encoder/json_encoder","docId":"api/fastkafka/encoder/json_encoder"}],"collapsed":true,"collapsible":true},{"type":"category","label":"executors","items":[{"type":"link","label":"DynamicTaskExecutor","href":"/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor","docId":"api/fastkafka/executors/DynamicTaskExecutor"},{"type":"link","label":"SequentialExecutor","href":"/docs/0.7.0/api/fastkafka/executors/SequentialExecutor","docId":"api/fastkafka/executors/SequentialExecutor"}],"collapsed":true,"collapsible":true},{"type":"category","label":"testing","items":[{"type":"link","label":"ApacheKaf
kaBroker","href":"/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker","docId":"api/fastkafka/testing/ApacheKafkaBroker"},{"type":"link","label":"LocalRedpandaBroker","href":"/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker","docId":"api/fastkafka/testing/LocalRedpandaBroker"},{"type":"link","label":"Tester","href":"/docs/0.7.0/api/fastkafka/testing/Tester","docId":"api/fastkafka/testing/Tester"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"CLI","items":[{"type":"link","label":"fastkafka","href":"/docs/0.7.0/cli/fastkafka","docId":"cli/fastkafka"},{"type":"link","label":"run_fastkafka_server_process","href":"/docs/0.7.0/cli/run_fastkafka_server_process","docId":"cli/run_fastkafka_server_process"}],"collapsed":true,"collapsible":true},{"type":"link","label":"LICENSE","href":"/docs/0.7.0/LICENSE","docId":"LICENSE"},{"type":"link","label":"Contributing to fastkafka","href":"/docs/0.7.0/CONTRIBUTING","docId":"CONTRIBUTING"},{"type":"link","label":"Release notes","href":"/docs/0.7.0/CHANGELOG","docId":"CHANGELOG"}]},"docs":{"api/fastkafka/encoder/avro_decoder":{"id":"api/fastkafka/encoder/avro_decoder","title":"avro_decoder","description":"fastkafka.encoder.avrodecoder {fastkafka.encoder.avrodecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avro_encoder":{"id":"api/fastkafka/encoder/avro_encoder","title":"avro_encoder","description":"fastkafka.encoder.avroencoder {fastkafka.encoder.avroencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/AvroBase":{"id":"api/fastkafka/encoder/AvroBase","title":"AvroBase","description":"fastkafka.encoder.AvroBase {fastkafka.encoder.AvroBase}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avsc_to_pydantic":{"id":"api/fastkafka/encoder/avsc_to_pydantic","title":"avsc_to_pydantic","description":"fastkafka.encoder.avsctopydantic 
{fastkafka.encoder.avsctopydantic}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_decoder":{"id":"api/fastkafka/encoder/json_decoder","title":"json_decoder","description":"fastkafka.encoder.jsondecoder {fastkafka.encoder.jsondecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_encoder":{"id":"api/fastkafka/encoder/json_encoder","title":"json_encoder","description":"fastkafka.encoder.jsonencoder {fastkafka.encoder.jsonencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/EventMetadata":{"id":"api/fastkafka/EventMetadata","title":"EventMetadata","description":"fastkafka.EventMetadata {fastkafka.EventMetadata}","sidebar":"tutorialSidebar"},"api/fastkafka/executors/DynamicTaskExecutor":{"id":"api/fastkafka/executors/DynamicTaskExecutor","title":"DynamicTaskExecutor","description":"fastkafka.executors.DynamicTaskExecutor {fastkafka.executors.DynamicTaskExecutor}","sidebar":"tutorialSidebar"},"api/fastkafka/executors/SequentialExecutor":{"id":"api/fastkafka/executors/SequentialExecutor","title":"SequentialExecutor","description":"fastkafka.executors.SequentialExecutor {fastkafka.executors.SequentialExecutor}","sidebar":"tutorialSidebar"},"api/fastkafka/FastKafka":{"id":"api/fastkafka/FastKafka","title":"FastKafka","description":"fastkafka.FastKafka {fastkafka.FastKafka}","sidebar":"tutorialSidebar"},"api/fastkafka/KafkaEvent":{"id":"api/fastkafka/KafkaEvent","title":"KafkaEvent","description":"fastkafka.KafkaEvent {fastkafka.KafkaEvent}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/ApacheKafkaBroker":{"id":"api/fastkafka/testing/ApacheKafkaBroker","title":"ApacheKafkaBroker","description":"fastkafka.testing.ApacheKafkaBroker {fastkafka.testing.ApacheKafkaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/LocalRedpandaBroker":{"id":"api/fastkafka/testing/LocalRedpandaBroker","title":"LocalRedpandaBroker","description":"fastkafka.testing.LocalRedpandaBroker 
{fastkafka.testing.LocalRedpandaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/Tester":{"id":"api/fastkafka/testing/Tester","title":"Tester","description":"fastkafka.testing.Tester {fastkafka.testing.Tester}","sidebar":"tutorialSidebar"},"CHANGELOG":{"id":"CHANGELOG","title":"Release notes","description":"0.7.0","sidebar":"tutorialSidebar"},"cli/fastkafka":{"id":"cli/fastkafka","title":"fastkafka","description":"Usage:","sidebar":"tutorialSidebar"},"cli/run_fastkafka_server_process":{"id":"cli/run_fastkafka_server_process","title":"run_fastkafka_server_process","description":"Usage:","sidebar":"tutorialSidebar"},"CONTRIBUTING":{"id":"CONTRIBUTING","title":"Contributing to fastkafka","description":"First off, thanks for taking the time to contribute! \u2764\ufe0f","sidebar":"tutorialSidebar"},"guides/Guide_00_FastKafka_Demo":{"id":"guides/Guide_00_FastKafka_Demo","title":"FastKafka tutorial","description":"FastKafka is a powerful and easy-to-use"},"guides/Guide_01_Intro":{"id":"guides/Guide_01_Intro","title":"Intro","description":"This tutorial will show you how to use FastKafkaAPI, step by"},"guides/Guide_02_First_Steps":{"id":"guides/Guide_02_First_Steps","title":"First Steps","description":"Creating a simple Kafka consumer app"},"guides/Guide_03_Authentication":{"id":"guides/Guide_03_Authentication","title":"Authentication","description":"TLS Authentication"},"guides/Guide_04_Github_Actions_Workflow":{"id":"guides/Guide_04_Github_Actions_Workflow","title":"Deploy FastKafka docs to GitHub Pages","description":"Getting started","sidebar":"tutorialSidebar"},"guides/Guide_05_Lifespan_Handler":{"id":"guides/Guide_05_Lifespan_Handler","title":"Lifespan Events","description":"Did you know that you can define some special code that runs before and","sidebar":"tutorialSidebar"},"guides/Guide_06_Benchmarking_FastKafka":{"id":"guides/Guide_06_Benchmarking_FastKafka","title":"Benchmarking FastKafka 
app","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka":{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","title":"Encoding and Decoding Kafka Messages with FastKafka","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_11_Consumes_Basics":{"id":"guides/Guide_11_Consumes_Basics","title":"@consumes basics","description":"You can use @consumes decorator to consume messages from Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_12_Batch_Consuming":{"id":"guides/Guide_12_Batch_Consuming","title":"Batch consuming","description":"If you want to consume data in batches @consumes decorator makes that","sidebar":"tutorialSidebar"},"guides/Guide_21_Produces_Basics":{"id":"guides/Guide_21_Produces_Basics","title":"@produces basics","description":"You can use @produces decorator to produce messages to Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_22_Partition_Keys":{"id":"guides/Guide_22_Partition_Keys","title":"Defining a partition key","description":"Partition keys are used in Apache Kafka to determine which partition a","sidebar":"tutorialSidebar"},"guides/Guide_23_Batch_Producing":{"id":"guides/Guide_23_Batch_Producing","title":"Batch producing","description":"If you want to send your data in batches @produces decorator makes","sidebar":"tutorialSidebar"},"guides/Guide_24_Using_Multiple_Kafka_Clusters":{"id":"guides/Guide_24_Using_Multiple_Kafka_Clusters","title":"Using multiple Kafka clusters","description":"Ready to take your FastKafka app to the next level? 
This guide shows you","sidebar":"tutorialSidebar"},"guides/Guide_30_Using_docker_to_deploy_fastkafka":{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","title":"Deploying FastKafka using Docker","description":"Building a Docker Image","sidebar":"tutorialSidebar"},"guides/Guide_31_Using_redpanda_to_test_fastkafka":{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","title":"Using Redpanda to test FastKafka","description":"What is FastKafka?","sidebar":"tutorialSidebar"},"guides/Guide_32_Using_fastapi_to_run_fastkafka_application":{"id":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application","title":"Using FastAPI to Run FastKafka Application","description":"When deploying a FastKafka application, the default approach is to","sidebar":"tutorialSidebar"},"index":{"id":"index","title":"FastKafka","description":"Effortless Kafka integration for your web services","sidebar":"tutorialSidebar"},"LICENSE":{"id":"LICENSE","title":"LICENSE","description":"Apache License","sidebar":"tutorialSidebar"}}}')}}]); \ No newline at end of file diff --git a/assets/js/3087bb2d.d34d4c8e.js b/assets/js/3087bb2d.d34d4c8e.js new file mode 100644 index 0000000..e0545c3 --- /dev/null +++ b/assets/js/3087bb2d.d34d4c8e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6308],{3905:(e,n,t)=>{t.d(n,{Zo:()=>l,kt:()=>f});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n<arguments.length;n++){var 
t=null!=arguments[n]?arguments[n]:{};n%2?s(Object(t),!0).forEach((function(n){i(e,n,t[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(t,n))}))}return e}function r(e,n){if(null==e)return{};var t,a,i=function(e,n){if(null==e)return{};var t,a,i={},s=Object.keys(e);for(a=0;a<s.length;a++)t=s[a],n.indexOf(t)>=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a<s.length;a++)t=s[a],n.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var d=a.createContext({}),c=function(e){var n=a.useContext(d),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},l=function(e){var n=c(e.components);return a.createElement(d.Provider,{value:n},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,s=e.originalType,d=e.parentName,l=r(e,["components","mdxType","originalType","parentName"]),p=c(t),u=i,f=p["".concat(d,".").concat(u)]||p[u]||m[u]||s;return t?a.createElement(f,o(o({ref:n},l),{},{components:t})):a.createElement(f,o({ref:n},l))}));function f(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var s=t.length,o=new Array(s);o[0]=u;var r={};for(var d in n)hasOwnProperty.call(n,d)&&(r[d]=n[d]);r.originalType=e,r[p]="string"==typeof e?e:i,o[1]=r;for(var c=2;c<s;c++)o[c]=t[c];return a.createElement.apply(null,o)}return a.createElement.apply(null,t)}u.displayName="MDXCreateElement"},7247:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>d,contentTitle:()=>o,default:()=>m,frontMatter:()=>s,metadata:()=>r,toc:()=>c});var a=t(7462),i=(t(7294),t(3905));const s={},o="Encoding and Decoding Kafka Messages with 
FastKafka",r={unversionedId:"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",id:"version-0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",title:"Encoding and Decoding Kafka Messages with FastKafka",description:"Prerequisites",source:"@site/versioned_docs/version-0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",permalink:"/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Lifespan Events",permalink:"/docs/0.7.0/guides/Guide_05_Lifespan_Handler"},next:{title:"Using multiple Kafka clusters",permalink:"/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters"}},d={},c=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Ways to Encode and Decode Messages with FastKafka",id:"ways-to-encode-and-decode-messages-with-fastkafka",level:2},{value:"1. Json encoder and decoder",id:"1-json-encoder-and-decoder",level:2},{value:"2. Avro encoder and decoder",id:"2-avro-encoder-and-decoder",level:2},{value:"What is Avro?",id:"what-is-avro",level:3},{value:"Installing FastKafka with Avro dependencies",id:"installing-fastkafka-with-avro-dependencies",level:3},{value:"Defining Avro Schema Using Pydantic Models",id:"defining-avro-schema-using-pydantic-models",level:3},{value:"Reusing existing avro schema",id:"reusing-existing-avro-schema",level:3},{value:"Building pydantic models from avro schema dictionary",id:"building-pydantic-models-from-avro-schema-dictionary",level:4},{value:"Building pydantic models from <code>.avsc</code> file",id:"building-pydantic-models-from-avsc-file",level:4},{value:"Consume/Produce avro messages with FastKafka",id:"consumeproduce-avro-messages-with-fastkafka",level:3},{value:"Assembling it all together",id:"assembling-it-all-together",level:3},{value:"3. 
Custom encoder and decoder",id:"3-custom-encoder-and-decoder",level:2},{value:"Writing a custom encoder and decoder",id:"writing-a-custom-encoder-and-decoder",level:3},{value:"Assembling it all together",id:"assembling-it-all-together-1",level:3}],l={toc:c},p="wrapper";function m(e){let{components:n,...t}=e;return(0,i.kt)(p,(0,a.Z)({},l,t,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"encoding-and-decoding-kafka-messages-with-fastkafka"},"Encoding and Decoding Kafka Messages with FastKafka"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A basic knowledge of ",(0,i.kt)("inlineCode",{parentName:"li"},"FastKafka")," is needed to proceed with this\nguide. If you are not familiar with ",(0,i.kt)("inlineCode",{parentName:"li"},"FastKafka"),", please go through\nthe ",(0,i.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("inlineCode",{parentName:"li"},"FastKafka")," with its dependencies installed is needed. Please\ninstall ",(0,i.kt)("inlineCode",{parentName:"li"},"FastKafka")," using the command - ",(0,i.kt)("inlineCode",{parentName:"li"},"pip install fastkafka"))),(0,i.kt)("h2",{id:"ways-to-encode-and-decode-messages-with-fastkafka"},"Ways to Encode and Decode Messages with FastKafka"),(0,i.kt)("p",null,"In python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. 
imilarly, while consuming messages, we consume them as bytes and\nthen convert them to strings."),(0,i.kt)("p",null,"In FastKafka, we specify message schema using Pydantic models as\nmentioned in ",(0,i.kt)("a",{parentName:"p",href:"/docs#messages"},"tutorial"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("p",null,"Then, we send and receive messages as instances of Pydantic models which\nwe defined. So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods of FastKafka accept a parameter\ncalled ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"json - This is the default encoder/decoder option in FastKafka.\nWhile producing, this option converts our instance of Pydantic model\nmessages to a JSON string and then converts it to bytes before\nsending it to the topic. 
While consuming, it converts bytes to a\nJSON string and then constructs an instance of Pydantic model from\nthe JSON string."),(0,i.kt)("li",{parentName:"ol"},"avro - This option uses Avro encoding/decoding to convert instances\nof Pydantic model messages to bytes while producing, and while\nconsuming, it constructs an instance of Pydantic model from bytes."),(0,i.kt)("li",{parentName:"ol"},"custom encoder/decoder - If you are not happy with the json or avro\nencoder/decoder options, you can write your own encoder/decoder\nfunctions and use them to encode/decode Pydantic messages.")),(0,i.kt)("h2",{id:"1-json-encoder-and-decoder"},"1. Json encoder and decoder"),(0,i.kt)("p",null,"The default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string."),(0,i.kt)("p",null,"We can use the application from ",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let\u2019s modify it to explicitly accept the \u2018json\u2019 encoder/decoder\nparameter:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="json")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, 
msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="json")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"In the above code, the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),' decorator sets up a\nconsumer for the \u201cinput_data" topic, using the \u2018json\u2019 decoder to convert\nthe message payload to an instance of ',(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData"),". The\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' decorator sets up a producer for the \u201cpredictions"\ntopic, using the \u2018json\u2019 encoder to convert the instance of\n',(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," to message payload."),(0,i.kt)("h2",{id:"2-avro-encoder-and-decoder"},"2. Avro encoder and decoder"),(0,i.kt)("h3",{id:"what-is-avro"},"What is Avro?"),(0,i.kt)("p",null,"Avro is a row-oriented remote procedure call and data serialization\nframework developed within Apache\u2019s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n",(0,i.kt)("a",{parentName:"p",href:"https://avro.apache.org/docs/"},"docs"),"."),(0,i.kt)("h3",{id:"installing-fastkafka-with-avro-dependencies"},"Installing FastKafka with Avro dependencies"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," with dependencies for Apache Avro installed is needed to use\navro encoder/decoder. 
Please install ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," with Avro support using\nthe command - ",(0,i.kt)("inlineCode",{parentName:"p"},"pip install fastkafka[avro]")),(0,i.kt)("h3",{id:"defining-avro-schema-using-pydantic-models"},"Defining Avro Schema Using Pydantic Models"),(0,i.kt)("p",null,"By default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models."),(0,i.kt)("p",null,"So, similar to the ",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),", the message schema will\nremain as it is."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("p",null,"No need to change anything to support avro. 
You can use existing\nPydantic models as is."),(0,i.kt)("h3",{id:"reusing-existing-avro-schema"},"Reusing existing avro schema"),(0,i.kt)("p",null,"If you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined."),(0,i.kt)("h4",{id:"building-pydantic-models-from-avro-schema-dictionary"},"Building pydantic models from avro schema dictionary"),(0,i.kt)("p",null,"Let\u2019s modify the above example and let\u2019s assume we have schemas already\nfor ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," which will look like below:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'iris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n')),(0,i.kt)("p",null,"We can easily construct pydantic models from avro schema using\n",(0,i.kt)("inlineCode",{parentName:"p"},"avsc_to_pydantic")," function which is included as part of ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka"),"\nitself."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n")),(0,i.kt)("p",null,"The above 
code will convert avro schema to pydantic models and will\nprint pydantic models\u2019 fields. The output of the above is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n")),(0,i.kt)("p",null,"This is exactly same as manually defining the pydantic models ourselves.\nYou don\u2019t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using ",(0,i.kt)("inlineCode",{parentName:"p"},"avsc_to_pydantic")," function as\ndemonstrated above."),(0,i.kt)("h4",{id:"building-pydantic-models-from-avsc-file"},"Building pydantic models from ",(0,i.kt)("inlineCode",{parentName:"h4"},".avsc")," file"),(0,i.kt)("p",null,"Not all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary ",(0,i.kt)("inlineCode",{parentName:"p"},".avsc")," files in\nfilesystem. Let\u2019s see how to convert those ",(0,i.kt)("inlineCode",{parentName:"p"},".avsc")," files to pydantic\nmodels."),(0,i.kt)("p",null,"Let\u2019s assume our avro files are stored in files called\n",(0,i.kt)("inlineCode",{parentName:"p"},"iris_input_data_schema.avsc")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"iris_prediction_schema.avsc"),". 
In that\ncase, following code converts the schema to pydantic models:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import json\nfrom fastkafka.encoder import avsc_to_pydantic\n\n\nwith open("iris_input_data_schema.avsc", "rb") as f:\n iris_input_data_schema = json.load(f)\n \nwith open("iris_prediction_schema.avsc", "rb") as f:\n iris_prediction_schema = json.load(f)\n \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n')),(0,i.kt)("h3",{id:"consumeproduce-avro-messages-with-fastkafka"},"Consume/Produce avro messages with FastKafka"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," provides ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods to\nconsume/produces messages to/from a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," topic. 
This is explained in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#function-decorators"},"tutorial"),"."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods accepts a parameter called\n",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode avro messages."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", encoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", decoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"In the above example, in ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods, we\nexplicitly instruct FastKafka to ",(0,i.kt)("inlineCode",{parentName:"p"},"decode")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"encode")," messages using\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"avro")," ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," instead of the default ",(0,i.kt)("inlineCode",{parentName:"p"},"json"),"\n",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder"),"."),(0,i.kt)("h3",{id:"assembling-it-all-together"},"Assembling it all together"),(0,i.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use 
",(0,i.kt)("inlineCode",{parentName:"p"},"avro")," to ",(0,i.kt)("inlineCode",{parentName:"p"},"decode")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"encode")," messages:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\niris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n 
title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"The above code is a sample implementation of using FastKafka to consume\nand produce Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the ",(0,i.kt)("inlineCode",{parentName:"p"},"avsc_to_pydantic")," function from the FastKafka library\nto convert the Avro schema into Pydantic models, which will be used to\ndecode and encode Avro messages."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," class is then instantiated with the broker details, and\ntwo functions decorated with ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' are defined to consume messages from the\n\u201cinput_data" topic and produce messages to the \u201cpredictions" topic,\nrespectively. The functions uses the decoder=\u201cavro" and encoder=\u201cavro"\nparameters to decode and encode the Avro messages.'),(0,i.kt)("p",null,"In summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline."),(0,i.kt)("h2",{id:"3-custom-encoder-and-decoder"},"3. 
Custom encoder and decoder"),(0,i.kt)("p",null,"If you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages."),(0,i.kt)("h3",{id:"writing-a-custom-encoder-and-decoder"},"Writing a custom encoder and decoder"),(0,i.kt)("p",null,"In this section, let\u2019s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n",(0,i.kt)("a",{parentName:"p",href:"https://en.wikipedia.org/wiki/ROT13"},"ROT13")," cipher."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"import codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, 'rot13')\n raw_bytes = obfuscated.encode(\"utf-8\")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n obfuscated = raw_msg.decode(\"utf-8\")\n msg_str = codecs.decode(obfuscated, 'rot13')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n")),(0,i.kt)("p",null,"The above code defines two custom functions for encoding and decoding\nmessages in a Kafka application using the FastKafka library."),(0,i.kt)("p",null,"The encoding function, ",(0,i.kt)("inlineCode",{parentName:"p"},"custom_encoder()"),", takes a message ",(0,i.kt)("inlineCode",{parentName:"p"},"msg")," which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"json()")," method, obfuscates the resulting string using the ROT13\nalgorithm from the ",(0,i.kt)("inlineCode",{parentName:"p"},"codecs")," module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding."),(0,i.kt)("p",null,"The decoding function, ",(0,i.kt)("inlineCode",{parentName:"p"},"custom_decoder()"),", takes a raw message ",(0,i.kt)("inlineCode",{parentName:"p"},"raw_msg"),"\nin 
bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the ",(0,i.kt)("inlineCode",{parentName:"p"},"json.loads()")," method and returns a\nnew instance of the specified ",(0,i.kt)("inlineCode",{parentName:"p"},"cls")," class initialized with the decoded\ndictionary."),(0,i.kt)("p",null,"These functions can be used with FastKafka\u2019s ",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics."),(0,i.kt)("p",null,"Let\u2019s test the above code"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n")),(0,i.kt)("p",null,"This will result in following output"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},'b\'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}\'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n')),(0,i.kt)("h3",{id:"assembling-it-all-together-1"},"Assembling it all together"),(0,i.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use our custom decoder and\nencoder functions:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = 
{}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\n\nimport codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, \'rot13\')\n raw_bytes = obfuscated.encode("utf-8")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n obfuscated = raw_msg.decode("utf-8")\n msg_str = codecs.decode(obfuscated, \'rot13\')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n species_class = 
ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"This code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system."),(0,i.kt)("p",null,"The custom ",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," function takes a message represented as a\n",(0,i.kt)("inlineCode",{parentName:"p"},"BaseModel")," and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned."),(0,i.kt)("p",null,"The custom ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder")," function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the ",(0,i.kt)("inlineCode",{parentName:"p"},"json"),"\nmodule. 
This dictionary is then converted to a ",(0,i.kt)("inlineCode",{parentName:"p"},"BaseModel")," instance\nusing the cls parameter."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/35d7f647.eeaa1f4e.js b/assets/js/35d7f647.eeaa1f4e.js new file mode 100644 index 0000000..27ddc89 --- /dev/null +++ b/assets/js/35d7f647.eeaa1f4e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8775],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var l=n.createContext({}),p=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},c=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var 
a=e.components,r=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=p(a),k=r,f=d["".concat(l,".").concat(k)]||d[k]||u[k]||o;return a?n.createElement(f,s(s({ref:t},c),{},{components:a})):n.createElement(f,s({ref:t},c))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,s[1]=i;for(var p=2;p<o;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},4035:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},s="Batch producing",i={unversionedId:"guides/Guide_23_Batch_Producing",id:"version-0.6.0/guides/Guide_23_Batch_Producing",title:"Batch producing",description:"If you want to send your data in batches @produces decorator makes",source:"@site/versioned_docs/version-0.6.0/guides/Guide_23_Batch_Producing.md",sourceDirName:"guides",slug:"/guides/Guide_23_Batch_Producing",permalink:"/docs/0.6.0/guides/Guide_23_Batch_Producing",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Defining a partition key",permalink:"/docs/0.6.0/guides/Guide_22_Partition_Keys"},next:{title:"Lifespan Events",permalink:"/docs/0.6.0/guides/Guide_05_Lifespan_Handler"}},l={},p=[{value:"Return a batch from the producing function",id:"return-a-batch-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the batch was sent to the Kafka topic with the defined key",id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key",level:2},{value:"Batch key",id:"batch-key",level:2},{value:"Check if the batch was sent to the Kafka 
topic",id:"check-if-the-batch-was-sent-to-the-kafka-topic",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"batch-producing"},"Batch producing"),(0,r.kt)("p",null,"If you want to send your data in batches ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator makes\nthat possible for you. By returning a ",(0,r.kt)("inlineCode",{parentName:"p"},"list")," of messages you want to\nsend in a batch the producer will collect the messages and send them in\na batch to a Kafka broker."),(0,r.kt)("p",null,"This guide will demonstrate how to use this feature."),(0,r.kt)("h2",{id:"return-a-batch-from-the-producing-function"},"Return a batch from the producing function"),(0,r.kt)("p",null,"To define a batch that you want to produce to Kafka topic, you need to\nreturn the ",(0,r.kt)("inlineCode",{parentName:"p"},"List")," of the messages that you want to be batched from your\nproducing function."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n")),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class batch\nthat is created from a list of msgs we passed into our producing\nfunction."),(0,r.kt)("p",null,'Lets also prepare a backgound task that will send a batch of \u201chello\nworld" messages when the app starts.'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n')),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from 
",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_21_Produces_Basics"},"@producer\nbasics")," guide to return the\n",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," batch. The final app will look like this (make sure you\nreplace the ",(0,r.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\n[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key"},"Check if the batch was sent to the Kafka topic with the defined key"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the batch of messages in your topic."),(0,r.kt)("h2",{id:"batch-key"},"Batch key"),(0,r.kt)("p",null,"To define a key for your batch like in ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a partition\nkey")," guide you can wrap the\nreturning value in a\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass. To learn more about defining a partition ke and\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass, please, have a look at ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a partition\nkey")," guide."),(0,r.kt)("p",null,"Let\u2019s demonstrate that."),(0,r.kt)("p",null,"To define a key, we just need to modify our producing function, like\nthis:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("p",null,"Now our app looks like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n 
"description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic"},"Check if the batch was sent to the Kafka topic"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic, containing a defined key. In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the batch of messages with the defined key in your topic."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/381a15bc.fecfc237.js b/assets/js/381a15bc.fecfc237.js new file mode 100644 index 0000000..b4901be --- /dev/null +++ b/assets/js/381a15bc.fecfc237.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7886],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>m});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){l(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function o(e,t){if(null==e)return{};var a,n,l=function(e,t){if(null==e)return{};var a,n,l={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},k=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,k=o(e,["components","mdxType","originalType","parentName"]),f=p(a),d=l,m=f["".concat(i,".").concat(d)]||f[d]||u[d]||r;return a?n.createElement(m,s(s({ref:t},k),{},{components:a})):n.createElement(m,s({ref:t},k))}));function m(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,s=new Array(r);s[0]=d;var o={};for(var i in t)hasOwnProperty.call(t,i)&&(o[i]=t[i]);o.originalType=e,o[f]="string"==typeof e?e:l,s[1]=o;for(var p=2;p<r;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}d.displayName="MDXCreateElement"},4388:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const 
r={},s="fastkafka",o={unversionedId:"cli/fastkafka",id:"version-0.7.1/cli/fastkafka",title:"fastkafka",description:"Usage:",source:"@site/versioned_docs/version-0.7.1/cli/fastkafka.md",sourceDirName:"cli",slug:"/cli/fastkafka",permalink:"/docs/0.7.1/cli/fastkafka",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Tester",permalink:"/docs/0.7.1/api/fastkafka/testing/Tester"},next:{title:"run_fastkafka_server_process",permalink:"/docs/0.7.1/cli/run_fastkafka_server_process"}},i={},p=[{value:"<code>fastkafka docs</code>",id:"fastkafka-docs",level:2},{value:"<code>fastkafka docs generate</code>",id:"fastkafka-docs-generate",level:3},{value:"<code>fastkafka docs install_deps</code>",id:"fastkafka-docs-install_deps",level:3},{value:"<code>fastkafka docs serve</code>",id:"fastkafka-docs-serve",level:3},{value:"<code>fastkafka run</code>",id:"fastkafka-run",level:2},{value:"<code>fastkafka testing</code>",id:"fastkafka-testing",level:2},{value:"<code>fastkafka testing install_deps</code>",id:"fastkafka-testing-install_deps",level:3}],k={toc:p},f="wrapper";function u(e){let{components:t,...a}=e;return(0,l.kt)(f,(0,n.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h1",{id:"fastkafka"},(0,l.kt)("inlineCode",{parentName:"h1"},"fastkafka")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this 
message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"docs"),": Commands for managing fastkafka app..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"run"),": Runs Fast Kafka API application"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"testing"),": Commands for managing fastkafka testing")),(0,l.kt)("h2",{id:"fastkafka-docs"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka docs")),(0,l.kt)("p",null,"Commands for managing fastkafka app documentation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"generate"),": Generates documentation for a FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"serve"),": Generates and serves documentation for a...")),(0,l.kt)("h3",{id:"fastkafka-docs-generate"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs generate")),(0,l.kt)("p",null,"Generates documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs generate [OPTIONS] 
APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created; default is current directory"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka documentation generation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-serve"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs serve")),(0,l.kt)("p",null,"Generates and serves documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs serve [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the 
form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created; default is current directory"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--bind TEXT"),": Some info ","[default: 127.0.0.1]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--port INTEGER"),": Some info ","[default: 8000]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-run"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka run")),(0,l.kt)("p",null,"Runs Fast Kafka API application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka run [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--num-workers INTEGER"),": Number of FastKafka instances to run, defaults to number of CPU cores. 
","[default: 64]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class. ","[default: localhost]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-testing"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka testing")),(0,l.kt)("p",null,"Commands for managing fastkafka testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka app...")),(0,l.kt)("h3",{id:"fastkafka-testing-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka testing install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka app testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/38a44003.5bed27ee.js b/assets/js/38a44003.5bed27ee.js new file mode 100644 index 0000000..c7f35e0 --- /dev/null +++ b/assets/js/38a44003.5bed27ee.js @@ -0,0 
+1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8674],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>d});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){l(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function o(e,t){if(null==e)return{};var a,n,l=function(e,t){if(null==e)return{};var a,n,l={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},k=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,k=o(e,["components","mdxType","originalType","parentName"]),f=p(a),m=l,d=f["".concat(i,".").concat(m)]||f[m]||u[m]||r;return a?n.createElement(d,s(s({ref:t},k),{},{components:a})):n.createElement(d,s({ref:t},k))}));function d(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,s=new Array(r);s[0]=m;var o={};for(var i in 
t)hasOwnProperty.call(t,i)&&(o[i]=t[i]);o.originalType=e,o[f]="string"==typeof e?e:l,s[1]=o;for(var p=2;p<r;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},4328:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const r={},s="fastkafka",o={unversionedId:"cli/fastkafka",id:"version-0.6.0/cli/fastkafka",title:"fastkafka",description:"Usage:",source:"@site/versioned_docs/version-0.6.0/cli/fastkafka.md",sourceDirName:"cli",slug:"/cli/fastkafka",permalink:"/docs/0.6.0/cli/fastkafka",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Tester",permalink:"/docs/0.6.0/api/fastkafka/testing/Tester"},next:{title:"run_fastkafka_server_process",permalink:"/docs/0.6.0/cli/run_fastkafka_server_process"}},i={},p=[{value:"<code>fastkafka docs</code>",id:"fastkafka-docs",level:2},{value:"<code>fastkafka docs generate</code>",id:"fastkafka-docs-generate",level:3},{value:"<code>fastkafka docs install_deps</code>",id:"fastkafka-docs-install_deps",level:3},{value:"<code>fastkafka docs serve</code>",id:"fastkafka-docs-serve",level:3},{value:"<code>fastkafka run</code>",id:"fastkafka-run",level:2},{value:"<code>fastkafka testing</code>",id:"fastkafka-testing",level:2},{value:"<code>fastkafka testing install_deps</code>",id:"fastkafka-testing-install_deps",level:3}],k={toc:p},f="wrapper";function u(e){let{components:t,...a}=e;return(0,l.kt)(f,(0,n.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h1",{id:"fastkafka"},(0,l.kt)("inlineCode",{parentName:"h1"},"fastkafka")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka [OPTIONS] COMMAND 
[ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"docs"),": Commands for managing fastkafka app..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"run"),": Runs Fast Kafka API application"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"testing"),": Commands for managing fastkafka testing")),(0,l.kt)("h2",{id:"fastkafka-docs"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka docs")),(0,l.kt)("p",null,"Commands for managing fastkafka app documentation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"generate"),": Generates documentation for a FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for 
FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"serve"),": Generates and serves documentation for a...")),(0,l.kt)("h3",{id:"fastkafka-docs-generate"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs generate")),(0,l.kt)("p",null,"Generates documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs generate [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created ","[default: .]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka documentation generation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and 
exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-serve"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs serve")),(0,l.kt)("p",null,"Generates and serves documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs serve [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created ","[default: .]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--bind TEXT"),": Some info ","[default: 127.0.0.1]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--port INTEGER"),": Some info ","[default: 8000]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-run"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka run")),(0,l.kt)("p",null,"Runs Fast Kafka API application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka run [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where 
",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--num-workers INTEGER"),": Number of FastKafka instances to run, defaults to number of CPU cores. ","[default: 64]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class. ","[required]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-testing"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka testing")),(0,l.kt)("p",null,"Commands for managing fastkafka testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka app...")),(0,l.kt)("h3",{id:"fastkafka-testing-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka testing install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka app testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka 
testing install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/40415b6c.d29ab3ac.js b/assets/js/40415b6c.d29ab3ac.js new file mode 100644 index 0000000..4a0cf71 --- /dev/null +++ b/assets/js/40415b6c.d29ab3ac.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7058],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function u(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var c=r.createContext({}),l=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},s=function(e){var t=l(e.components);return r.createElement(c.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return 
r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,c=e.parentName,s=u(e,["components","mdxType","originalType","parentName"]),p=l(n),d=a,m=p["".concat(c,".").concat(d)]||p[d]||f[d]||o;return n?r.createElement(m,i(i({ref:t},s),{},{components:n})):r.createElement(m,i({ref:t},s))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,i=new Array(o);i[0]=d;var u={};for(var c in t)hasOwnProperty.call(t,c)&&(u[c]=t[c]);u.originalType=e,u[p]="string"==typeof e?e:a,i[1]=u;for(var l=2;l<o;l++)i[l]=n[l];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}d.displayName="MDXCreateElement"},3100:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>u,toc:()=>l});var r=n(7462),a=(n(7294),n(3905));const o={},i="Authentication",u={unversionedId:"guides/Guide_03_Authentication",id:"guides/Guide_03_Authentication",title:"Authentication",description:"TLS Authentication",source:"@site/docs/guides/Guide_03_Authentication.md",sourceDirName:"guides",slug:"/guides/Guide_03_Authentication",permalink:"/docs/next/guides/Guide_03_Authentication",draft:!1,tags:[],version:"current",frontMatter:{}},c={},l=[{value:"TLS Authentication",id:"tls-authentication",level:2}],s={toc:l},p="wrapper";function f(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"authentication"},"Authentication"),(0,a.kt)("h2",{id:"tls-authentication"},"TLS Authentication"),(0,a.kt)("p",null,"sasl_mechanism (str) \u2013 Authentication mechanism when security_protocol\nis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. 
Default: PLAIN"),(0,a.kt)("p",null,"sasl_plain_username (str) \u2013 username for SASL PLAIN authentication.\nDefault: None"),(0,a.kt)("p",null,"sasl_plain_password (str) \u2013 password for SASL PLAIN authentication.\nDefault: None"),(0,a.kt)("p",null,"sasl_oauth_token_provider (AbstractTokenProvider) \u2013 OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). Default: None"))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/409b7aa0.924e3028.js b/assets/js/409b7aa0.924e3028.js new file mode 100644 index 0000000..0054050 --- /dev/null +++ b/assets/js/409b7aa0.924e3028.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6623],{3905:(e,t,a)=>{a.d(t,{Zo:()=>f,kt:()=>d});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},f=function(e){var 
t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},u="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,p=e.parentName,f=s(e,["components","mdxType","originalType","parentName"]),u=l(a),c=o,d=u["".concat(p,".").concat(c)]||u[c]||k[c]||r;return a?n.createElement(d,i(i({ref:t},f),{},{components:a})):n.createElement(d,i({ref:t},f))}));function d(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=c;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[u]="string"==typeof e?e:o,i[1]=s;for(var l=2;l<r;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}c.displayName="MDXCreateElement"},1359:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>k,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var n=a(7462),o=(a(7294),a(3905));const r={},i="Deploy FastKafka docs to GitHub Pages",s={unversionedId:"guides/Guide_04_Github_Actions_Workflow",id:"version-0.5.0/guides/Guide_04_Github_Actions_Workflow",title:"Deploy FastKafka docs to GitHub Pages",description:"Getting started",source:"@site/versioned_docs/version-0.5.0/guides/Guide_04_Github_Actions_Workflow.md",sourceDirName:"guides",slug:"/guides/Guide_04_Github_Actions_Workflow",permalink:"/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using Redpanda to test FastKafka",permalink:"/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka"},next:{title:"Deploying FastKafka using Docker",permalink:"/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka"}},p={},l=[{value:"Getting started",id:"getting-started",level:2},{value:"Options",id:"options",level:2},{value:"Set app location",id:"set-app-location",level:3},{value:"Example 
Repository",id:"example-repository",level:2}],f={toc:l},u="wrapper";function k(e){let{components:t,...a}=e;return(0,o.kt)(u,(0,n.Z)({},f,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"deploy-fastkafka-docs-to-github-pages"},"Deploy FastKafka docs to GitHub Pages"),(0,o.kt)("h2",{id:"getting-started"},"Getting started"),(0,o.kt)("p",null,"Add your workflow file ",(0,o.kt)("inlineCode",{parentName:"p"},".github/workflows/fastkafka_docs_deploy.yml")," and\npush it to your remote default branch."),(0,o.kt)("p",null,"Here is an example workflow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'name: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n push:\n branches: [ "main", "master" ]\n workflow_dispatch:\n\njobs:\n deploy:\n runs-on: ubuntu-latest\n permissions:\n contents: write\n steps:\n - uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("h2",{id:"options"},"Options"),(0,o.kt)("h3",{id:"set-app-location"},"Set app location"),(0,o.kt)("p",null,"Input in the form of ",(0,o.kt)("inlineCode",{parentName:"p"},"path:app"),", where ",(0,o.kt)("inlineCode",{parentName:"p"},"path")," is the path to a Python\nfile and ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," is an object of type\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'- name: Deploy\n uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("p",null,"In the above example,\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is named as ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_app")," and it is available in the 
",(0,o.kt)("inlineCode",{parentName:"p"},"application"),"\nsubmodule of the ",(0,o.kt)("inlineCode",{parentName:"p"},"test_fastkafka")," module."),(0,o.kt)("h2",{id:"example-repository"},"Example Repository"),(0,o.kt)("p",null,"A\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\nlibrary that uses the above-mentioned workfow actions to publish\nFastKafka docs to ",(0,o.kt)("inlineCode",{parentName:"p"},"Github Pages")," can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml"},"here"),"."))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/414d4a37.2900f4a9.js b/assets/js/414d4a37.2900f4a9.js new file mode 100644 index 0000000..952a7c6 --- /dev/null +++ b/assets/js/414d4a37.2900f4a9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7473],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>k});var o=t(7294);function n(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);a&&(o=o.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,o)}return t}function s(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?r(Object(t),!0).forEach((function(a){n(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):r(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function l(e,a){if(null==e)return{};var t,o,n=function(e,a){if(null==e)return{};var t,o,n={},r=Object.keys(e);for(o=0;o<r.length;o++)t=r[o],a.indexOf(t)>=0||(n[t]=e[t]);return n}(e,a);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);for(o=0;o<r.length;o++)t=r[o],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var i=o.createContext({}),p=function(e){var a=o.useContext(i),t=a;return e&&(t="function"==typeof e?e(a):s(s({},a),e)),t},c=function(e){var a=p(e.components);return o.createElement(i.Provider,{value:a},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return o.createElement(o.Fragment,{},a)}},d=o.forwardRef((function(e,a){var t=e.components,n=e.mdxType,r=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),m=p(t),d=n,k=m["".concat(i,".").concat(d)]||m[d]||u[d]||r;return t?o.createElement(k,s(s({ref:a},c),{},{components:t})):o.createElement(k,s({ref:a},c))}));function k(e,a){var t=arguments,n=a&&a.mdxType;if("string"==typeof e||n){var r=t.length,s=new Array(r);s[0]=d;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[m]="string"==typeof e?e:n,s[1]=l;for(var p=2;p<r;p++)s[p]=t[p];return o.createElement.apply(null,s)}return o.createElement.apply(null,t)}d.displayName="MDXCreateElement"},5954:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var o=t(7462),n=(t(7294),t(3905));const r={},s="@consumes basics",l={unversionedId:"guides/Guide_11_Consumes_Basics",id:"version-0.5.0/guides/Guide_11_Consumes_Basics",title:"@consumes basics",description:"You can use @consumes decorator to consume messages from Kafka topics.",source:"@site/versioned_docs/version-0.5.0/guides/Guide_11_Consumes_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_11_Consumes_Basics",permalink:"/docs/0.5.0/guides/Guide_11_Consumes_Basics",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/0.5.0/"},next:{title:"@produces basics",permalink:"/docs/0.5.0/guides/Guide_21_Produces_Basics"}},i={},p=[{value:"Import 
<code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a consumer function and decorate it with <code>@consumes</code>",id:"create-a-consumer-function-and-decorate-it-with-consumes",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Send the message to kafka topic",id:"send-the-message-to-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2}],c={toc:p},m="wrapper";function u(e){let{components:a,...t}=e;return(0,n.kt)(m,(0,o.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,n.kt)("h1",{id:"consumes-basics"},"@consumes basics"),(0,n.kt)("p",null,"You can use ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator to consume messages from Kafka topics."),(0,n.kt)("p",null,"In this guide we will create a simple FastKafka app that will consume\n",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages from hello_world topic."),(0,n.kt)("h2",{id:"import-fastkafka"},"Import ",(0,n.kt)("a",{parentName:"h2",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,n.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,n.kt)("p",null,"To use the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator, first we need to import the base\nFastKafka app to create our application."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,n.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,n.kt)("p",null,"Next, you need to define the structure of the messages you want to\nconsume from the topic using ",(0,n.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". 
For\nthe guide we\u2019ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded."),(0,n.kt)("p",null,"Let\u2019s import ",(0,n.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,n.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,n.kt)("inlineCode",{parentName:"p"},"msg")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,n.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,n.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,n.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,n.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap server"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,n.kt)("h2",{id:"create-a-consumer-function-and-decorate-it-with-consumes"},"Create a consumer function and decorate it with ",(0,n.kt)("inlineCode",{parentName:"h2"},"@consumes")),(0,n.kt)("p",null,"Let\u2019s create a consumer function that will consume ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nfrom ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and log 
them."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"The function decorated with the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will be called\nwhen a message is produced to Kafka."),(0,n.kt)("p",null,"The message will then be injected into the typed ",(0,n.kt)("em",{parentName:"p"},"msg")," argument of the\nfunction and its type will be used to parse the message."),(0,n.kt)("p",null,"In this example case, when the message is sent into a ",(0,n.kt)("em",{parentName:"p"},"hello_world"),"\ntopic, it will be parsed into a HelloWorld class and ",(0,n.kt)("inlineCode",{parentName:"p"},"on_hello_world"),"\nfunction will be called with the parsed class as ",(0,n.kt)("em",{parentName:"p"},"msg")," argument value."),(0,n.kt)("h2",{id:"final-app"},"Final app"),(0,n.kt)("p",null,"Your app code should look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("h2",{id:"run-the-app"},"Run the app"),(0,n.kt)("p",null,"Now we can run the app. 
Copy the code above in consumer_example.py and\nrun it by running"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,n.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[382372]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[382372]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[382372]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[382372]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n[382372]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 0}. 
\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 382372...\n[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 382372 terminated.\n")),(0,n.kt)("h2",{id:"send-the-message-to-kafka-topic"},"Send the message to kafka topic"),(0,n.kt)("p",null,"Lets send a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},'echo {\\"msg\\": \\"Hello world\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,n.kt)("p",null,"You should see the \u201cGot msg: msg='Hello world'\" being logged by your\nconsumer."),(0,n.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,n.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are\nreceiving the message from, this is because the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default \u201con","_",'" prefix from it\nand use the rest as the topic name. 
In this example case, the topic is\n',(0,n.kt)("em",{parentName:"p"},"hello_world"),"."),(0,n.kt)("p",null,"You can choose your custom prefix by defining the ",(0,n.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nconsumes decorator, like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\n@app.consumes(prefix="read_from_")\nasync def read_from_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,n.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in consumes decorator, like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\n@app.consumes(topic="my_special_topic")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("h2",{id:"message-data"},"Message data"),(0,n.kt)("p",null,"The message received from kafka is translated from binary JSON\nrepresentation int the class defined by typing of ",(0,n.kt)("em",{parentName:"p"},"msg")," parameter in the\nfunction decorated by the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator."),(0,n.kt)("p",null,"In this example case, the message will be parsed into a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld"),"\nclass."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/456c5d82.1356cd36.js b/assets/js/456c5d82.1356cd36.js new file mode 100644 index 0000000..47f14a3 --- /dev/null +++ b/assets/js/456c5d82.1356cd36.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1939],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>f});var o=n(7294);function a(e,t,n){return t in 
e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function s(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?r(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function i(e,t){if(null==e)return{};var n,o,a=function(e,t){if(null==e)return{};var n,o,a={},r=Object.keys(e);for(o=0;o<r.length;o++)n=r[o],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o<r.length;o++)n=r[o],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var l=o.createContext({}),c=function(e){var t=o.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},u=function(e){var t=c(e.components);return o.createElement(l.Provider,{value:t},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},d=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,r=e.originalType,l=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),p=c(n),d=a,f=p["".concat(l,".").concat(d)]||p[d]||m[d]||r;return n?o.createElement(f,s(s({ref:t},u),{},{components:n})):o.createElement(f,s({ref:t},u))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,s=new Array(r);s[0]=d;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[p]="string"==typeof e?e:a,s[1]=i;for(var c=2;c<r;c++)s[c]=n[c];return o.createElement.apply(null,s)}return 
o.createElement.apply(null,n)}d.displayName="MDXCreateElement"},6153:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>m,frontMatter:()=>r,metadata:()=>i,toc:()=>c});var o=n(7462),a=(n(7294),n(3905));const r={},s="Batch consuming",i={unversionedId:"guides/Guide_12_Batch_Consuming",id:"version-0.7.0/guides/Guide_12_Batch_Consuming",title:"Batch consuming",description:"If you want to consume data in batches @consumes decorator makes that",source:"@site/versioned_docs/version-0.7.0/guides/Guide_12_Batch_Consuming.md",sourceDirName:"guides",slug:"/guides/Guide_12_Batch_Consuming",permalink:"/docs/0.7.0/guides/Guide_12_Batch_Consuming",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@consumes basics",permalink:"/docs/0.7.0/guides/Guide_11_Consumes_Basics"},next:{title:"@produces basics",permalink:"/docs/0.7.0/guides/Guide_21_Produces_Basics"}},l={},c=[{value:"Consume function with batching",id:"consume-function-with-batching",level:2},{value:"App example",id:"app-example",level:2},{value:"Send the messages to kafka topic",id:"send-the-messages-to-kafka-topic",level:2}],u={toc:c},p="wrapper";function m(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,o.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"batch-consuming"},"Batch consuming"),(0,a.kt)("p",null,"If you want to consume data in batches ",(0,a.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator makes that\npossible for you. By typing a consumed msg object as a ",(0,a.kt)("inlineCode",{parentName:"p"},"list")," of\nmessages the consumer will call your consuming function with a batch of\nmessages consumed from a single partition. 
Let\u2019s demonstrate that now."),(0,a.kt)("h2",{id:"consume-function-with-batching"},"Consume function with batching"),(0,a.kt)("p",null,"To consume messages in batches, you need to wrap you message type into a\nlist and the ",(0,a.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will take care of the rest for you.\nYour consumes function will be called with batches grouped by partition\nnow."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(auto_offset_reset="earliest")\nasync def on_hello_world(msg: List[HelloWorld]):\n logger.info(f"Got msg batch: {msg}")\n')),(0,a.kt)("h2",{id:"app-example"},"App example"),(0,a.kt)("p",null,"We will modify the app example from ",(0,a.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_11_Consumes_Basics"},"@consumes\nbasics")," guide to consume\n",(0,a.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages batch. The final app will look like this (make\nsure you replace the ",(0,a.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,a.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom typing import List\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(auto_offset_reset="earliest")\nasync def on_hello_world(msg: List[HelloWorld]):\n logger.info(f"Got msg batch: 
{msg}")\n')),(0,a.kt)("h2",{id:"send-the-messages-to-kafka-topic"},"Send the messages to kafka topic"),(0,a.kt)("p",null,"Lets send a couple of ",(0,a.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages to the ",(0,a.kt)("em",{parentName:"p"},"hello_world")," topic\nand check if our consumer kafka application has logged the received\nmessages batch. In your terminal, run the following command at least two\ntimes to create multiple messages in your kafka queue:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell"},'echo {\\"msg\\": \\"Hello world\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,a.kt)("p",null,"Now we can run the app. Copy the code of the example app in\nconsumer_example.py and run it by running"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,a.kt)("p",null,"You should see the your Kafka messages being logged in batches by your\nconsumer."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/46d2add0.08babb68.js b/assets/js/46d2add0.08babb68.js new file mode 100644 index 0000000..f0c9737 --- /dev/null +++ b/assets/js/46d2add0.08babb68.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1674],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>u});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},f="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,i=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),f=s(a),k=r,u=f["".concat(p,".").concat(k)]||f[k]||d[k]||i;return a?n.createElement(u,o(o({ref:t},c),{},{components:a})):n.createElement(u,o({ref:t},c))}));function u(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var i=a.length,o=new Array(i);o[0]=k;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[f]="string"==typeof e?e:r,o[1]=l;for(var s=2;s<i;s++)o[s]=a[s];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},6655:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const i={},o=void 0,l={unversionedId:"api/fastkafka/EventMetadata",id:"version-0.6.0/api/fastkafka/EventMetadata",title:"EventMetadata",description:"fastkafka.EventMetadata 
{fastkafka.EventMetadata}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/EventMetadata.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/EventMetadata",permalink:"/docs/0.6.0/api/fastkafka/EventMetadata",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Benchmarking FastKafka app",permalink:"/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka"},next:{title:"FastKafka",permalink:"/docs/0.6.0/api/fastkafka/"}},p={},s=[{value:"<code>fastkafka.EventMetadata</code>",id:"fastkafka.EventMetadata",level:2}],c={toc:s},f="wrapper";function d(e){let{components:t,...a}=e;return(0,r.kt)(f,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.EventMetadata"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.EventMetadata")),(0,r.kt)("p",null,"A class for encapsulating Kafka record metadata."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"topic"),": The topic this record is received from"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"partition"),": The partition from which this record is received"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"offset"),": The position of this record in the corresponding Kafka partition"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"timestamp"),": The timestamp of this record"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"timestamp_type"),": The timestamp type of this record"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"key"),": The key (or ",(0,r.kt)("inlineCode",{parentName:"li"},"None")," if no key is specified)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"value"),": The value"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"serialized_key_size"),": The 
size of the serialized, uncompressed key in bytes"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"serialized_value_size"),": The size of the serialized, uncompressed value in bytes"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"headers"),": The headers")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/478692f7.e9e2abf4.js b/assets/js/478692f7.e9e2abf4.js new file mode 100644 index 0000000..42d066c --- /dev/null +++ b/assets/js/478692f7.e9e2abf4.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7562],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){r(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function i(e,t){if(null==e)return{};var n,a,r=function(e,t){if(null==e)return{};var n,a,r={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=a.createContext({}),s=function(e){var t=a.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=s(e.components);return 
a.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},k=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,c=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),d=s(n),k=r,m=d["".concat(c,".").concat(k)]||d[k]||u[k]||o;return n?a.createElement(m,l(l({ref:t},p),{},{components:n})):a.createElement(m,l({ref:t},p))}));function m(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=k;var i={};for(var c in t)hasOwnProperty.call(t,c)&&(i[c]=t[c]);i.originalType=e,i[d]="string"==typeof e?e:r,l[1]=i;for(var s=2;s<o;s++)l[s]=n[s];return a.createElement.apply(null,l)}return a.createElement.apply(null,n)}k.displayName="MDXCreateElement"},7116:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var a=n(7462),r=(n(7294),n(3905));const o={},l=void 0,i={unversionedId:"api/fastkafka/encoder/json_decoder",id:"version-0.8.0/api/fastkafka/encoder/json_decoder",title:"json_decoder",description:"jsondecoder {fastkafka.encoder.jsondecoder}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/json_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_decoder",permalink:"/docs/api/fastkafka/encoder/json_decoder",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avsc_to_pydantic",permalink:"/docs/api/fastkafka/encoder/avsc_to_pydantic"},next:{title:"json_encoder",permalink:"/docs/api/fastkafka/encoder/json_encoder"}},c={},s=[{value:"json_decoder",id:"fastkafka.encoder.json_decoder",level:3}],p={toc:s},d="wrapper";function 
u(e){let{components:t,...n}=e;return(0,r.kt)(d,(0,a.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h3",{id:"fastkafka.encoder.json_decoder"},"json_decoder"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/json.py#L42-L55",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"json_decoder(\n raw_msg, cls\n)\n")),(0,r.kt)("p",null,"Decoder to decode json string in bytes to pydantic model instance"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"raw_msg")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bytes")),(0,r.kt)("td",{parentName:"tr",align:null},"Bytes message received from Kafka topic"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"cls")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Type[pydantic.main.BaseModel]")),(0,r.kt)("td",{parentName:"tr",align:null},"Pydantic class; This pydantic class will be used to construct instance of same 
class"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Any")),(0,r.kt)("td",{parentName:"tr",align:null},"An instance of given pydantic class")))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/47ac2e75.3ab08e6a.js b/assets/js/47ac2e75.3ab08e6a.js new file mode 100644 index 0000000..1a0dfc8 --- /dev/null +++ b/assets/js/47ac2e75.3ab08e6a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3684],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>f});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function s(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?r(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):r(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function l(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)t=r[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)t=r[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=n.createContext({}),p=function(e){var a=n.useContext(i),t=a;return e&&(t="function"==typeof e?e(a):s(s({},a),e)),t},c=function(e){var a=p(e.components);return n.createElement(i.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},k=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=p(t),k=o,f=d["".concat(i,".").concat(k)]||d[k]||u[k]||r;return t?n.createElement(f,s(s({ref:a},c),{},{components:t})):n.createElement(f,s({ref:a},c))}));function f(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var r=t.length,s=new Array(r);s[0]=k;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[d]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=t[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,t)}k.displayName="MDXCreateElement"},5232:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var n=t(7462),o=(t(7294),t(3905));const r={},s="@produces basics",l={unversionedId:"guides/Guide_21_Produces_Basics",id:"version-0.5.0/guides/Guide_21_Produces_Basics",title:"@produces basics",description:"You can use @produces decorator to produce messages to Kafka topics.",source:"@site/versioned_docs/version-0.5.0/guides/Guide_21_Produces_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_21_Produces_Basics",permalink:"/docs/0.5.0/guides/Guide_21_Produces_Basics",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@consumes basics",permalink:"/docs/0.5.0/guides/Guide_11_Consumes_Basics"},next:{title:"Defining a partition 
key",permalink:"/docs/0.5.0/guides/Guide_22_Partition_Keys"}},i={},p=[{value:"Import <code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a producer function and decorate it with <code>@produces</code>",id:"create-a-producer-function-and-decorate-it-with-produces",level:2},{value:"Instruct the app to start sending HelloWorld messages",id:"instruct-the-app-to-start-sending-helloworld-messages",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic",id:"check-if-the-message-was-sent-to-the-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2}],c={toc:p},d="wrapper";function u(e){let{components:a,...t}=e;return(0,o.kt)(d,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"produces-basics"},"@produces basics"),(0,o.kt)("p",null,"You can use ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator to produce messages to Kafka topics."),(0,o.kt)("p",null,"In this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic."),(0,o.kt)("h2",{id:"import-fastkafka"},"Import ",(0,o.kt)("a",{parentName:"h2",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,o.kt)("p",null,"To use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator, frist we need to import the base\nFastKafka app to create our application."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,o.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,o.kt)("p",null,"Next, you need to define 
the structure of the messages you want to send\nto the topic using ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". For the guide\nwe\u2019ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded."),(0,o.kt)("p",null,"Let\u2019s import ",(0,o.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,o.kt)("inlineCode",{parentName:"p"},"msg")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,o.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,o.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,o.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap server"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,o.kt)("h2",{id:"create-a-producer-function-and-decorate-it-with-produces"},"Create a producer function and decorate it with ",(0,o.kt)("inlineCode",{parentName:"h2"},"@produces")),(0,o.kt)("p",null,"Let\u2019s create a producer function that will produce 
",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nto ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n")),(0,o.kt)("p",null,"Now you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic."),(0,o.kt)("p",null,"By default, the topic is determined from your function name, the \u201cto","_",'"\nprefix is stripped and what is left over is used as a topic name. I this\ncase, that is ',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("h2",{id:"instruct-the-app-to-start-sending-helloworld-messages"},"Instruct the app to start sending HelloWorld messages"),(0,o.kt)("p",null,"Let\u2019s use ",(0,o.kt)("inlineCode",{parentName:"p"},"@run_in_background")," decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"Your app code should look like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = 
FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"run-the-app"},"Run the app"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'script_file = "producer_example.py"\ncmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"\nmd(\n f"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```"\n)\n')),(0,o.kt)("p",null,"Now we can run the app. Copy the code above in producer_example.py and\nrun it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n")),(0,o.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n[84645]: [INFO] 
fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n")),(0,o.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic"},"Check if the message was sent to the Kafka topic"),(0,o.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic. In your terminal run:'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh -topic=hello_world --from-beginning -bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n")),(0,o.kt)("p",null,'You should see the {\u201cmsg": \u201cHello world!"} messages in your topic.'),(0,o.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,o.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are sending\nthe message to, this is because the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default \u201cto","_",'" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("p",null,'!!! warn "New topics"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Kafka producers and application startup will fail if the topics you are producing to don't yet exist. 
Before running the app, make sure that the topics are created.\n")),(0,o.kt)("p",null,"You can choose your custom prefix by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nproduces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(prefix="send_to_")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in produces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(topic="my_special_topic")\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("h2",{id:"message-data"},"Message data"),(0,o.kt)("p",null,"The return value from your function will be translated JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app."),(0,o.kt)("p",null,"In this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. 
In the from of:\n",(0,o.kt)("inlineCode",{parentName:"p"},'b\'{"msg": "Hello world!"}\'')))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/48199270.467a59ac.js b/assets/js/48199270.467a59ac.js new file mode 100644 index 0000000..d8d1443 --- /dev/null +++ b/assets/js/48199270.467a59ac.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5041],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>m});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},d=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var 
a=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=l(a),u=i,m=c["".concat(p,".").concat(u)]||c[u]||k[u]||r;return a?n.createElement(m,o(o({ref:t},d),{},{components:a})):n.createElement(m,o({ref:t},d))}));function m(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=a.length,o=new Array(r);o[0]=u;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[c]="string"==typeof e?e:i,o[1]=s;for(var l=2;l<r;l++)o[l]=a[l];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},1730:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var n=a(7462),i=(a(7294),a(3905));const r={},o="Using Redpanda to test FastKafka",s={unversionedId:"guides/Guide_31_Using_redpanda_to_test_fastkafka",id:"guides/Guide_31_Using_redpanda_to_test_fastkafka",title:"Using Redpanda to test FastKafka",description:"What is FastKafka?",source:"@site/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_31_Using_redpanda_to_test_fastkafka",permalink:"/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using Tester to test FastKafka",permalink:"/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka"},next:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/next/guides/Guide_04_Github_Actions_Workflow"}},p={},l=[{value:"What is FastKafka?",id:"what-is-fastkafka",level:2},{value:"What is Redpanda?",id:"what-is-redpanda",level:2},{value:"Example repo",id:"example-repo",level:2},{value:"The process",id:"the-process",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:2},{value:"2. 
Cloning and setting up the example repo",id:"2-cloning-and-setting-up-the-example-repo",level:2},{value:"Create a virtual environment",id:"create-a-virtual-environment",level:3},{value:"Install Python dependencies",id:"install-python-dependencies",level:3},{value:"3. Writing server code",id:"3-writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"4. Writing the test code",id:"4-writing-the-test-code",level:2},{value:"5. Running the tests",id:"5-running-the-tests",level:2},{value:"Recap",id:"recap",level:3}],d={toc:l},c="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-redpanda-to-test-fastkafka"},"Using Redpanda to test FastKafka"),(0,i.kt)("h2",{id:"what-is-fastkafka"},"What is FastKafka?"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,i.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,i.kt)("h2",{id:"what-is-redpanda"},"What is Redpanda?"),(0,i.kt)("p",null,"Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda."),(0,i.kt)("p",null,"From ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"redpanda.com"),":"),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"Redpanda is a Kafka\xae-compatible streaming data platform that is proven\nto be 10x faster and 6x lower in total costs. 
It is also JVM-free,\nZooKeeper\xae-free, Jepsen-tested and source available.")),(0,i.kt)("p",null,"Some of the advantages of Redpanda over Kafka are"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A single binary with built-in everything, no ZooKeeper\xae or JVM\nneeded."),(0,i.kt)("li",{parentName:"ol"},"Costs upto 6X less than Kafka."),(0,i.kt)("li",{parentName:"ol"},"Up to 10x lower average latencies and up to 6x faster Kafka\ntransactions without compromising correctness.")),(0,i.kt)("p",null,"To learn more about Redpanda, please visit their\n",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"website")," or checkout this ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark"},"blog\npost"),"\ncomparing Redpanda and Kafka\u2019s performance benchmarks."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A sample FastKafka-based library that uses Redpanda for testing, based\non this guide, can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"here"),"."),(0,i.kt)("h2",{id:"the-process"},"The process"),(0,i.kt)("p",null,"Here are the steps we\u2019ll be walking through to build our example:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Set up the prerequisites."),(0,i.kt)("li",{parentName:"ol"},"Clone the example repo."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write an application using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write a test case to test FastKafka with Redpanda."),(0,i.kt)("li",{parentName:"ol"},"Run the test case and produce/consume messages.")),(0,i.kt)("h2",{id:"1-prerequisites"},"1. Prerequisites"),(0,i.kt)("p",null,"Before starting, make sure you have the following prerequisites set up:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Python 3.x"),": A Python 3.x installation is required to run\nFastKafka. 
You can download the latest version of Python from the\n",(0,i.kt)("a",{parentName:"li",href:"https://www.python.org/downloads/"},"official website"),". You\u2019ll also\nneed to have pip installed and updated, which is Python\u2019s package\ninstaller."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Docker Desktop"),": Docker is used to run Redpanda, which is\nrequired for testing FastKafka. You can download and install Docker\nDesktop from the ",(0,i.kt)("a",{parentName:"li",href:"https://www.docker.com/products/docker-desktop/"},"official\nwebsite"),"."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Git"),": You\u2019ll need to have Git installed to clone the example\nrepo. You can download Git from the ",(0,i.kt)("a",{parentName:"li",href:"https://git-scm.com/downloads"},"official\nwebsite"),".")),(0,i.kt)("h2",{id:"2-cloning-and-setting-up-the-example-repo"},"2. Cloning and setting up the example repo"),(0,i.kt)("p",null,"To get started with the example code, clone the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"GitHub\nrepository")," by\nrunning the following command in your terminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n")),(0,i.kt)("p",null,"This will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files."),(0,i.kt)("h3",{id:"create-a-virtual-environment"},"Create a virtual environment"),(0,i.kt)("p",null,"Before writing any code, let\u2019s ",(0,i.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#module-venv"},"create a new virtual\nenvironment"),"\nfor our project."),(0,i.kt)("p",null,"A virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between 
different projects."),(0,i.kt)("p",null,"To create a new virtual environment, run the following commands in your\nterminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"python3 -m venv venv\n")),(0,i.kt)("p",null,"This will create a new directory called ",(0,i.kt)("inlineCode",{parentName:"p"},"venv")," in your project\ndirectory, which will contain the virtual environment."),(0,i.kt)("p",null,"To activate the virtual environment, run the following command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"source venv/bin/activate\n")),(0,i.kt)("p",null,"This will change your shell\u2019s prompt to indicate that you are now\nworking inside the virtual environment."),(0,i.kt)("p",null,"Finally, run the following command to upgrade ",(0,i.kt)("inlineCode",{parentName:"p"},"pip"),", the Python package\ninstaller:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install --upgrade pip\n")),(0,i.kt)("h3",{id:"install-python-dependencies"},"Install Python dependencies"),(0,i.kt)("p",null,"Next, let\u2019s install the required Python dependencies. In this guide,\nwe\u2019ll be using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nto write our application code and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest-asyncio")," to test\nit."),(0,i.kt)("p",null,"You can install the dependencies from the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file\nprovided in the cloned repository by running:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install -r requirements.txt\n")),(0,i.kt)("p",null,"This will install all the required packages and their dependencies."),(0,i.kt)("h2",{id:"3-writing-server-code"},"3. 
Writing server code"),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:"),(0,i.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,i.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model."),(0,i.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,i.kt)("h3",{id:"messages"},"Messages"),(0,i.kt)("p",null,"FastKafka uses ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,i.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,i.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat"},(0,i.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,i.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,i.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("h3",{id:"application"},"Application"),(0,i.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,i.kt)("p",null,"It starts by defining a dictionary called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,i.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,i.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker."),(0,i.kt)("p",null,"Next, an instance of the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum required arguments:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generating documentation")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,i.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,i.kt)("p",null,"FastKafka provides convenient function decorators ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,i.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,i.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,i.kt)("p",null,"This following example shows how to use the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,i.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,i.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h2",{id:"4-writing-the-test-code"},"4. Writing the test code"),(0,i.kt)("p",null,"The service can be tested using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstance which can be configured to start a ",(0,i.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/LocalRedpandaBroker/"},"Redpanda\nbroker")," for testing\npurposes. 
The ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file in the cloned repository contains the\nfollowing code for testing."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n # Start Tester app and create local Redpanda broker for testing\n async with Tester(kafka_app).using_local_redpanda(\n tag="v23.1.2", listener_port=9092\n ) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nmodule utilizes uses\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/testing/LocalRedpandaBroker#fastkafka.testing.LocalRedpandaBroker"},(0,i.kt)("inlineCode",{parentName:"a"},"LocalRedpandaBroker")),"\nto start and stop a Redpanda broker for testing purposes using Docker"),(0,i.kt)("h2",{id:"5-running-the-tests"},"5. Running the tests"),(0,i.kt)("p",null,"We can run the tests which is in ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file by executing the\nfollowing command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pytest test.py\n")),(0,i.kt)("p",null,"This will start a Redpanda broker using Docker and executes tests. 
The\noutput of the command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item \n\ntest.py . [100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n")),(0,i.kt)("p",null,"Running the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable."),(0,i.kt)("h3",{id:"recap"},"Recap"),(0,i.kt)("p",null,"We have created an Iris classification model and encapulated it into our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napplication. 
The app will consume the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," from the\n",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,i.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,i.kt)("p",null,"To test the app we have:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Created the app")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Started our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nclass with ",(0,i.kt)("inlineCode",{parentName:"p"},"Redpanda")," broker which mirrors the developed app topics\nfor testing purposes")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Sent ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message to ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message"))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4972.0680bd7d.js b/assets/js/4972.0680bd7d.js new file mode 100644 index 0000000..7c3e82e --- /dev/null +++ b/assets/js/4972.0680bd7d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4972],{4972:(e,t,a)=>{a.r(t),a.d(t,{default:()=>i});var n=a(7294),l=a(5999),o=a(833),r=a(7452);function i(){return n.createElement(n.Fragment,null,n.createElement(o.d,{title:(0,l.I)({id:"theme.NotFound.title",message:"Page Not Found"})}),n.createElement(r.Z,null,n.createElement("main",{className:"container margin-vert--xl"},n.createElement("div",{className:"row"},n.createElement("div",{className:"col col--6 
col--offset-3"},n.createElement("h1",{className:"hero__title"},n.createElement(l.Z,{id:"theme.NotFound.title",description:"The title of the 404 page"},"Page Not Found")),n.createElement("p",null,n.createElement(l.Z,{id:"theme.NotFound.p1",description:"The first paragraph of the 404 page"},"We could not find what you were looking for.")),n.createElement("p",null,n.createElement(l.Z,{id:"theme.NotFound.p2",description:"The 2nd paragraph of the 404 page"},"Please contact the owner of the site that linked you to the original URL and let them know their link is broken.")))))))}}}]); \ No newline at end of file diff --git a/assets/js/4a00fd3a.497faf2c.js b/assets/js/4a00fd3a.497faf2c.js new file mode 100644 index 0000000..afd4f0d --- /dev/null +++ b/assets/js/4a00fd3a.497faf2c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4559],{3905:(e,n,a)=>{a.d(n,{Zo:()=>l,kt:()=>f});var t=a(7294);function i(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function s(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function o(e){for(var n=1;n<arguments.length;n++){var a=null!=arguments[n]?arguments[n]:{};n%2?s(Object(a),!0).forEach((function(n){i(e,n,a[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):s(Object(a)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(a,n))}))}return e}function r(e,n){if(null==e)return{};var a,t,i=function(e,n){if(null==e)return{};var a,t,i={},s=Object.keys(e);for(t=0;t<s.length;t++)a=s[t],n.indexOf(a)>=0||(i[a]=e[a]);return i}(e,n);if(Object.getOwnPropertySymbols){var 
s=Object.getOwnPropertySymbols(e);for(t=0;t<s.length;t++)a=s[t],n.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var d=t.createContext({}),c=function(e){var n=t.useContext(d),a=n;return e&&(a="function"==typeof e?e(n):o(o({},n),e)),a},l=function(e){var n=c(e.components);return t.createElement(d.Provider,{value:n},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,i=e.mdxType,s=e.originalType,d=e.parentName,l=r(e,["components","mdxType","originalType","parentName"]),p=c(a),u=i,f=p["".concat(d,".").concat(u)]||p[u]||m[u]||s;return a?t.createElement(f,o(o({ref:n},l),{},{components:a})):t.createElement(f,o({ref:n},l))}));function f(e,n){var a=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var s=a.length,o=new Array(s);o[0]=u;var r={};for(var d in n)hasOwnProperty.call(n,d)&&(r[d]=n[d]);r.originalType=e,r[p]="string"==typeof e?e:i,o[1]=r;for(var c=2;c<s;c++)o[c]=a[c];return t.createElement.apply(null,o)}return t.createElement.apply(null,a)}u.displayName="MDXCreateElement"},4835:(e,n,a)=>{a.r(n),a.d(n,{assets:()=>d,contentTitle:()=>o,default:()=>m,frontMatter:()=>s,metadata:()=>r,toc:()=>c});var t=a(7462),i=(a(7294),a(3905));const s={},o="Encoding and Decoding Kafka Messages with FastKafka",r={unversionedId:"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",id:"version-0.8.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",title:"Encoding and Decoding Kafka Messages with 
FastKafka",description:"Prerequisites",source:"@site/versioned_docs/version-0.8.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",permalink:"/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Lifespan Events",permalink:"/docs/guides/Guide_05_Lifespan_Handler"},next:{title:"Using multiple Kafka clusters",permalink:"/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters"}},d={},c=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Ways to Encode and Decode Messages with FastKafka",id:"ways-to-encode-and-decode-messages-with-fastkafka",level:2},{value:"1. Json encoder and decoder",id:"1-json-encoder-and-decoder",level:2},{value:"2. Avro encoder and decoder",id:"2-avro-encoder-and-decoder",level:2},{value:"What is Avro?",id:"what-is-avro",level:3},{value:"Installing FastKafka with Avro dependencies",id:"installing-fastkafka-with-avro-dependencies",level:3},{value:"Defining Avro Schema Using Pydantic Models",id:"defining-avro-schema-using-pydantic-models",level:3},{value:"Reusing existing avro schema",id:"reusing-existing-avro-schema",level:3},{value:"Building pydantic models from avro schema dictionary",id:"building-pydantic-models-from-avro-schema-dictionary",level:4},{value:"Building pydantic models from <code>.avsc</code> file",id:"building-pydantic-models-from-avsc-file",level:4},{value:"Consume/Produce avro messages with FastKafka",id:"consumeproduce-avro-messages-with-fastkafka",level:3},{value:"Assembling it all together",id:"assembling-it-all-together",level:3},{value:"3. 
Custom encoder and decoder",id:"3-custom-encoder-and-decoder",level:2},{value:"Writing a custom encoder and decoder",id:"writing-a-custom-encoder-and-decoder",level:3},{value:"Assembling it all together",id:"assembling-it-all-together-1",level:3}],l={toc:c},p="wrapper";function m(e){let{components:n,...a}=e;return(0,i.kt)(p,(0,t.Z)({},l,a,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"encoding-and-decoding-kafka-messages-with-fastkafka"},"Encoding and Decoding Kafka Messages with FastKafka"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A basic knowledge of\n",(0,i.kt)("a",{parentName:"li",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nis needed to proceed with this guide. If you are not familiar with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),",\nplease go through the ",(0,i.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("a",{parentName:"li",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith its dependencies installed is needed. Please install\n",(0,i.kt)("a",{parentName:"li",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nusing the command - ",(0,i.kt)("inlineCode",{parentName:"li"},"pip install fastkafka"))),(0,i.kt)("h2",{id:"ways-to-encode-and-decode-messages-with-fastkafka"},"Ways to Encode and Decode Messages with FastKafka"),(0,i.kt)("p",null,"In python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. 
imilarly, while consuming messages, we consume them as bytes and\nthen convert them to strings."),(0,i.kt)("p",null,"In FastKafka, we specify message schema using Pydantic models as\nmentioned in ",(0,i.kt)("a",{parentName:"p",href:"/docs#messages"},"tutorial"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("p",null,"Then, we send and receive messages as instances of Pydantic models which\nwe defined. So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods of FastKafka accept a parameter\ncalled ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"json - This is the default encoder/decoder option in FastKafka.\nWhile producing, this option converts our instance of Pydantic model\nmessages to a JSON string and then converts it to bytes before\nsending it to the topic. 
While consuming, it converts bytes to a\nJSON string and then constructs an instance of Pydantic model from\nthe JSON string."),(0,i.kt)("li",{parentName:"ol"},"avro - This option uses Avro encoding/decoding to convert instances\nof Pydantic model messages to bytes while producing, and while\nconsuming, it constructs an instance of Pydantic model from bytes."),(0,i.kt)("li",{parentName:"ol"},"custom encoder/decoder - If you are not happy with the json or avro\nencoder/decoder options, you can write your own encoder/decoder\nfunctions and use them to encode/decode Pydantic messages.")),(0,i.kt)("h2",{id:"1-json-encoder-and-decoder"},"1. Json encoder and decoder"),(0,i.kt)("p",null,"The default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string."),(0,i.kt)("p",null,"We can use the application from ",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let\u2019s modify it to explicitly accept the \u2018json\u2019 encoder/decoder\nparameter:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="json")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, 
msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="json")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"In the above code, the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),' decorator sets up a\nconsumer for the \u201cinput_data" topic, using the \u2018json\u2019 decoder to convert\nthe message payload to an instance of ',(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData"),". The\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' decorator sets up a producer for the \u201cpredictions"\ntopic, using the \u2018json\u2019 encoder to convert the instance of\n',(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," to message payload."),(0,i.kt)("h2",{id:"2-avro-encoder-and-decoder"},"2. Avro encoder and decoder"),(0,i.kt)("h3",{id:"what-is-avro"},"What is Avro?"),(0,i.kt)("p",null,"Avro is a row-oriented remote procedure call and data serialization\nframework developed within Apache\u2019s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n",(0,i.kt)("a",{parentName:"p",href:"https://avro.apache.org/docs/"},"docs"),"."),(0,i.kt)("h3",{id:"installing-fastkafka-with-avro-dependencies"},"Installing FastKafka with Avro dependencies"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith dependencies for Apache Avro installed is needed to use avro\nencoder/decoder. 
Please install\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith Avro support using the command - ",(0,i.kt)("inlineCode",{parentName:"p"},"pip install fastkafka[avro]")),(0,i.kt)("h3",{id:"defining-avro-schema-using-pydantic-models"},"Defining Avro Schema Using Pydantic Models"),(0,i.kt)("p",null,"By default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models."),(0,i.kt)("p",null,"So, similar to the ",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),", the message schema will\nremain as it is."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("p",null,"No need to change anything to support avro. 
You can use existing\nPydantic models as is."),(0,i.kt)("h3",{id:"reusing-existing-avro-schema"},"Reusing existing avro schema"),(0,i.kt)("p",null,"If you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined."),(0,i.kt)("h4",{id:"building-pydantic-models-from-avro-schema-dictionary"},"Building pydantic models from avro schema dictionary"),(0,i.kt)("p",null,"Let\u2019s modify the above example and let\u2019s assume we have schemas already\nfor ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," which will look like below:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'iris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n')),(0,i.kt)("p",null,"We can easily construct pydantic models from avro schema using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/encoder/avsc_to_pydantic#fastkafka.encoder.avsc_to_pydantic"},(0,i.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction which is included as part of\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nitself."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = 
avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.model_fields)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.model_fields)\n")),(0,i.kt)("p",null,"The above code will convert avro schema to pydantic models and will\nprint pydantic models\u2019 fields. The output of the above is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n")),(0,i.kt)("p",null,"This is exactly same as manually defining the pydantic models ourselves.\nYou don\u2019t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/encoder/avsc_to_pydantic#fastkafka.encoder.avsc_to_pydantic"},(0,i.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction as demonstrated above."),(0,i.kt)("h4",{id:"building-pydantic-models-from-avsc-file"},"Building pydantic models from ",(0,i.kt)("inlineCode",{parentName:"h4"},".avsc")," file"),(0,i.kt)("p",null,"Not all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary ",(0,i.kt)("inlineCode",{parentName:"p"},".avsc")," files in\nfilesystem. Let\u2019s see how to convert those ",(0,i.kt)("inlineCode",{parentName:"p"},".avsc")," files to pydantic\nmodels."),(0,i.kt)("p",null,"Let\u2019s assume our avro files are stored in files called\n",(0,i.kt)("inlineCode",{parentName:"p"},"iris_input_data_schema.avsc")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"iris_prediction_schema.avsc"),". 
In that\ncase, following code converts the schema to pydantic models:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import json\nfrom fastkafka.encoder import avsc_to_pydantic\n\n\nwith open("iris_input_data_schema.avsc", "rb") as f:\n iris_input_data_schema = json.load(f)\n \nwith open("iris_prediction_schema.avsc", "rb") as f:\n iris_prediction_schema = json.load(f)\n \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.model_fields)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.model_fields)\n')),(0,i.kt)("h3",{id:"consumeproduce-avro-messages-with-fastkafka"},"Consume/Produce avro messages with FastKafka"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nprovides ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods to consume/produces\nmessages to/from a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," topic. 
This is explained in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#function-decorators"},"tutorial"),"."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods accepts a parameter called\n",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode avro messages."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", encoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", decoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"In the above example, in ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods, we\nexplicitly instruct FastKafka to ",(0,i.kt)("inlineCode",{parentName:"p"},"decode")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"encode")," messages using\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"avro")," ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," instead of the default ",(0,i.kt)("inlineCode",{parentName:"p"},"json"),"\n",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder"),"."),(0,i.kt)("h3",{id:"assembling-it-all-together"},"Assembling it all together"),(0,i.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use 
",(0,i.kt)("inlineCode",{parentName:"p"},"avro")," to ",(0,i.kt)("inlineCode",{parentName:"p"},"decode")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"encode")," messages:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\niris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n 
title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"The above code is a sample implementation of using FastKafka to consume\nand produce Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/encoder/avsc_to_pydantic#fastkafka.encoder.avsc_to_pydantic"},(0,i.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction from the FastKafka library to convert the Avro schema into\nPydantic models, which will be used to decode and encode Avro messages."),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is then instantiated with the broker details, and two functions\ndecorated with ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' are\ndefined to consume messages from the \u201cinput_data" topic and produce\nmessages to the \u201cpredictions" topic, respectively. 
The functions uses\nthe decoder=\u201cavro" and encoder=\u201cavro" parameters to decode and encode\nthe Avro messages.'),(0,i.kt)("p",null,"In summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline."),(0,i.kt)("h2",{id:"3-custom-encoder-and-decoder"},"3. Custom encoder and decoder"),(0,i.kt)("p",null,"If you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages."),(0,i.kt)("h3",{id:"writing-a-custom-encoder-and-decoder"},"Writing a custom encoder and decoder"),(0,i.kt)("p",null,"In this section, let\u2019s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n",(0,i.kt)("a",{parentName:"p",href:"https://en.wikipedia.org/wiki/ROT13"},"ROT13")," cipher."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"import codecs\nimport json\nfrom typing import Any, Type\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, 'rot13')\n raw_bytes = obfuscated.encode(\"utf-8\")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\n obfuscated = raw_msg.decode(\"utf-8\")\n msg_str = codecs.decode(obfuscated, 'rot13')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n")),(0,i.kt)("p",null,"The above code defines two custom functions for encoding and decoding\nmessages in a Kafka application using the FastKafka library."),(0,i.kt)("p",null,"The encoding function, ",(0,i.kt)("inlineCode",{parentName:"p"},"custom_encoder()"),", takes a message ",(0,i.kt)("inlineCode",{parentName:"p"},"msg")," which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"json()")," method, obfuscates the resulting string using the ROT13\nalgorithm from the 
",(0,i.kt)("inlineCode",{parentName:"p"},"codecs")," module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding."),(0,i.kt)("p",null,"The decoding function, ",(0,i.kt)("inlineCode",{parentName:"p"},"custom_decoder()"),", takes a raw message ",(0,i.kt)("inlineCode",{parentName:"p"},"raw_msg"),"\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the ",(0,i.kt)("inlineCode",{parentName:"p"},"json.loads()")," method and returns a\nnew instance of the specified ",(0,i.kt)("inlineCode",{parentName:"p"},"cls")," class initialized with the decoded\ndictionary."),(0,i.kt)("p",null,"These functions can be used with FastKafka\u2019s ",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics."),(0,i.kt)("p",null,"Let\u2019s test the above code"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n")),(0,i.kt)("p",null,"This will result in following output"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},'b\'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}\'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n')),(0,i.kt)("h3",{id:"assembling-it-all-together-1"},"Assembling it all together"),(0,i.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use our custom decoder and\nencoder 
functions:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\n\nimport codecs\nimport json\nfrom typing import Any, Type\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, \'rot13\')\n raw_bytes = obfuscated.encode("utf-8")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\n obfuscated = raw_msg.decode("utf-8")\n msg_str = codecs.decode(obfuscated, \'rot13\')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n 
"security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"This code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system."),(0,i.kt)("p",null,"The custom ",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," function takes a message represented as a\n",(0,i.kt)("inlineCode",{parentName:"p"},"BaseModel")," and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned."),(0,i.kt)("p",null,"The custom ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder")," function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the ",(0,i.kt)("inlineCode",{parentName:"p"},"json"),"\nmodule. 
This dictionary is then converted to a ",(0,i.kt)("inlineCode",{parentName:"p"},"BaseModel")," instance\nusing the cls parameter."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4a2f1dfa.5bc1686b.js b/assets/js/4a2f1dfa.5bc1686b.js new file mode 100644 index 0000000..218abd3 --- /dev/null +++ b/assets/js/4a2f1dfa.5bc1686b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[383],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>m});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},d=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var 
a=e.components,i=e.mdxType,o=e.originalType,p=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=l(a),u=i,m=c["".concat(p,".").concat(u)]||c[u]||k[u]||o;return a?n.createElement(m,r(r({ref:t},d),{},{components:a})):n.createElement(m,r({ref:t},d))}));function m(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=a.length,r=new Array(o);r[0]=u;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[c]="string"==typeof e?e:i,r[1]=s;for(var l=2;l<o;l++)r[l]=a[l];return n.createElement.apply(null,r)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},4331:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>r,default:()=>k,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var n=a(7462),i=(a(7294),a(3905));const o={},r="Using Redpanda to test FastKafka",s={unversionedId:"guides/Guide_31_Using_redpanda_to_test_fastkafka",id:"version-0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka",title:"Using Redpanda to test FastKafka",description:"What is FastKafka?",source:"@site/versioned_docs/version-0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_31_Using_redpanda_to_test_fastkafka",permalink:"/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},next:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow"}},p={},l=[{value:"What is FastKafka?",id:"what-is-fastkafka",level:2},{value:"What is Redpanda?",id:"what-is-redpanda",level:2},{value:"Example repo",id:"example-repo",level:2},{value:"The process",id:"the-process",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:2},{value:"2. 
Cloning and setting up the example repo",id:"2-cloning-and-setting-up-the-example-repo",level:2},{value:"Create a virtual environment",id:"create-a-virtual-environment",level:3},{value:"Install Python dependencies",id:"install-python-dependencies",level:3},{value:"3. Writing server code",id:"3-writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"4. Writing the test code",id:"4-writing-the-test-code",level:2},{value:"5. Running the tests",id:"5-running-the-tests",level:2},{value:"Recap",id:"recap",level:3}],d={toc:l},c="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-redpanda-to-test-fastkafka"},"Using Redpanda to test FastKafka"),(0,i.kt)("h2",{id:"what-is-fastkafka"},"What is FastKafka?"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,i.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,i.kt)("h2",{id:"what-is-redpanda"},"What is Redpanda?"),(0,i.kt)("p",null,"Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda."),(0,i.kt)("p",null,"From ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"redpanda.com"),":"),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"Redpanda is a Kafka\xae-compatible streaming data platform that is proven\nto be 10x faster and 6x lower in total costs. 
It is also JVM-free,\nZooKeeper\xae-free, Jepsen-tested and source available.")),(0,i.kt)("p",null,"Some of the advantages of Redpanda over Kafka are"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A single binary with built-in everything, no ZooKeeper\xae or JVM\nneeded."),(0,i.kt)("li",{parentName:"ol"},"Costs upto 6X less than Kafka."),(0,i.kt)("li",{parentName:"ol"},"Up to 10x lower average latencies and up to 6x faster Kafka\ntransactions without compromising correctness.")),(0,i.kt)("p",null,"To learn more about Redpanda, please visit their\n",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"website")," or checkout this ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark"},"blog\npost"),"\ncomparing Redpanda and Kafka\u2019s performance benchmarks."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A sample fastkafka-based library that uses Redpanda for testing, based\non this guide, can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"here"),"."),(0,i.kt)("h2",{id:"the-process"},"The process"),(0,i.kt)("p",null,"Here are the steps we\u2019ll be walking through to build our example:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Set up the prerequisites."),(0,i.kt)("li",{parentName:"ol"},"Clone the example repo."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write an application using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write a test case to test FastKafka with Redpanda."),(0,i.kt)("li",{parentName:"ol"},"Run the test case and produce/consume messages.")),(0,i.kt)("h2",{id:"1-prerequisites"},"1. Prerequisites"),(0,i.kt)("p",null,"Before starting, make sure you have the following prerequisites set up:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Python 3.x"),": A Python 3.x installation is required to run\nFastKafka. 
You can download the latest version of Python from the\n",(0,i.kt)("a",{parentName:"li",href:"https://www.python.org/downloads/"},"official website"),". You\u2019ll also\nneed to have pip installed and updated, which is Python\u2019s package\ninstaller."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Docker Desktop"),": Docker is used to run Redpanda, which is\nrequired for testing FastKafka. You can download and install Docker\nDesktop from the ",(0,i.kt)("a",{parentName:"li",href:"https://www.docker.com/products/docker-desktop/"},"official\nwebsite"),"."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Git"),": You\u2019ll need to have Git installed to clone the example\nrepo. You can download Git from the ",(0,i.kt)("a",{parentName:"li",href:"https://git-scm.com/downloads"},"official\nwebsite"),".")),(0,i.kt)("h2",{id:"2-cloning-and-setting-up-the-example-repo"},"2. Cloning and setting up the example repo"),(0,i.kt)("p",null,"To get started with the example code, clone the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"GitHub\nrepository")," by\nrunning the following command in your terminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n")),(0,i.kt)("p",null,"This will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files."),(0,i.kt)("h3",{id:"create-a-virtual-environment"},"Create a virtual environment"),(0,i.kt)("p",null,"Before writing any code, let\u2019s ",(0,i.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#module-venv"},"create a new virtual\nenvironment"),"\nfor our project."),(0,i.kt)("p",null,"A virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between 
different projects."),(0,i.kt)("p",null,"To create a new virtual environment, run the following commands in your\nterminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"python3 -m venv venv\n")),(0,i.kt)("p",null,"This will create a new directory called ",(0,i.kt)("inlineCode",{parentName:"p"},"venv")," in your project\ndirectory, which will contain the virtual environment."),(0,i.kt)("p",null,"To activate the virtual environment, run the following command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"source venv/bin/activate\n")),(0,i.kt)("p",null,"This will change your shell\u2019s prompt to indicate that you are now\nworking inside the virtual environment."),(0,i.kt)("p",null,"Finally, run the following command to upgrade ",(0,i.kt)("inlineCode",{parentName:"p"},"pip"),", the Python package\ninstaller:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install --upgrade pip\n")),(0,i.kt)("h3",{id:"install-python-dependencies"},"Install Python dependencies"),(0,i.kt)("p",null,"Next, let\u2019s install the required Python dependencies. In this guide,\nwe\u2019ll be using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nto write our application code and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest-asyncio")," to test\nit."),(0,i.kt)("p",null,"You can install the dependencies from the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file\nprovided in the cloned repository by running:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install -r requirements.txt\n")),(0,i.kt)("p",null,"This will install all the required packages and their dependencies."),(0,i.kt)("h2",{id:"3-writing-server-code"},"3. 
Writing server code"),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:"),(0,i.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,i.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model."),(0,i.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,i.kt)("h3",{id:"messages"},"Messages"),(0,i.kt)("p",null,"FastKafka uses ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,i.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,i.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat"},(0,i.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,i.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,i.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("h3",{id:"application"},"Application"),(0,i.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,i.kt)("p",null,"It starts by defining a dictionary called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,i.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,i.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker."),(0,i.kt)("p",null,"Next, an instance of the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum required arguments:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generating documentation")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,i.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,i.kt)("p",null,"FastKafka provides convenient function decorators ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,i.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,i.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,i.kt)("p",null,"This following example shows how to use the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,i.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,i.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h2",{id:"4-writing-the-test-code"},"4. Writing the test code"),(0,i.kt)("p",null,"The service can be tested using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstance which can be configured to start a ",(0,i.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/LocalRedpandaBroker/"},"Redpanda\nbroker")," for testing\npurposes. 
The ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file in the cloned repository contains the\nfollowing code for testing."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n # Start Tester app and create local Redpanda broker for testing\n async with Tester(kafka_app).using_local_redpanda(\n tag="v23.1.2", listener_port=9092\n ) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nmodule utilizes uses\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker/#fastkafka.testing.LocalRedpandaBroker"},(0,i.kt)("inlineCode",{parentName:"a"},"LocalRedpandaBroker")),"\nto start and stop a Redpanda broker for testing purposes using Docker"),(0,i.kt)("h2",{id:"5-running-the-tests"},"5. Running the tests"),(0,i.kt)("p",null,"We can run the tests which is in ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file by executing the\nfollowing command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pytest test.py\n")),(0,i.kt)("p",null,"This will start a Redpanda broker using Docker and executes tests. 
The\noutput of the command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item \n\ntest.py . [100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n")),(0,i.kt)("p",null,"Running the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable."),(0,i.kt)("h3",{id:"recap"},"Recap"),(0,i.kt)("p",null,"We have created an Iris classification model and encapulated it into our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napplication. 
The app will consume the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," from the\n",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,i.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,i.kt)("p",null,"To test the app we have:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Created the app")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Started our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nclass with ",(0,i.kt)("inlineCode",{parentName:"p"},"Redpanda")," broker which mirrors the developed app topics\nfor testing purposes")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Sent ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message to ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message"))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4a9e4762.9c7fce8d.js b/assets/js/4a9e4762.9c7fce8d.js new file mode 100644 index 0000000..bc4294f --- /dev/null +++ b/assets/js/4a9e4762.9c7fce8d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1377],{3905:(a,e,t)=>{t.d(e,{Zo:()=>c,kt:()=>f});var n=t(7294);function o(a,e,t){return e in a?Object.defineProperty(a,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):a[e]=t,a}function s(a,e){var t=Object.keys(a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(a);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),t.push.apply(t,n)}return t}function r(a){for(var e=1;e<arguments.length;e++){var 
t=null!=arguments[e]?arguments[e]:{};e%2?s(Object(t),!0).forEach((function(e){o(a,e,t[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(t,e))}))}return a}function i(a,e){if(null==a)return{};var t,n,o=function(a,e){if(null==a)return{};var t,n,o={},s=Object.keys(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||(o[t]=a[t]);return o}(a,e);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(a,t)&&(o[t]=a[t])}return o}var p=n.createContext({}),l=function(a){var e=n.useContext(p),t=e;return a&&(t="function"==typeof a?a(e):r(r({},e),a)),t},c=function(a){var e=l(a.components);return n.createElement(p.Provider,{value:e},a.children)},k="mdxType",u={inlineCode:"code",wrapper:function(a){var e=a.children;return n.createElement(n.Fragment,{},e)}},d=n.forwardRef((function(a,e){var t=a.components,o=a.mdxType,s=a.originalType,p=a.parentName,c=i(a,["components","mdxType","originalType","parentName"]),k=l(t),d=o,f=k["".concat(p,".").concat(d)]||k[d]||u[d]||s;return t?n.createElement(f,r(r({ref:e},c),{},{components:t})):n.createElement(f,r({ref:e},c))}));function f(a,e){var t=arguments,o=e&&e.mdxType;if("string"==typeof a||o){var s=t.length,r=new Array(s);r[0]=d;var i={};for(var p in e)hasOwnProperty.call(e,p)&&(i[p]=e[p]);i.originalType=a,i[k]="string"==typeof a?a:o,r[1]=i;for(var l=2;l<s;l++)r[l]=t[l];return n.createElement.apply(null,r)}return n.createElement.apply(null,t)}d.displayName="MDXCreateElement"},5809:(a,e,t)=>{t.r(e),t.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>u,frontMatter:()=>s,metadata:()=>i,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},r="FastKafka",i={unversionedId:"index",id:"version-0.8.0/index",title:"FastKafka",description:"Effortless Kafka integration for your web 
services",source:"@site/versioned_docs/version-0.8.0/index.md",sourceDirName:".",slug:"/",permalink:"/docs/",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",next:{title:"@consumes basics",permalink:"/docs/guides/Guide_11_Consumes_Basics"}},p={},l=[{value:"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50",id:"-stay-in-touch-",level:4},{value:"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d",id:"-we-were-busy-lately-",level:4},{value:"Install",id:"install",level:2},{value:"Tutorial",id:"tutorial",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2},{value:"License",id:"license",level:2}],c={toc:l},k="wrapper";function u(a){let{components:e,...t}=a;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:e,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka"},"FastKafka"),(0,o.kt)("b",null,"Effortless Kafka integration for your web services"),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/v/fastkafka.png",alt:"PyPI"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/dm/fastkafka.png",alt:"PyPI -\nDownloads"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/pyversions/fastkafka.png",alt:"PyPI - Python\nVersion"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml",alt:"GitHub 
Workflow\nStatus"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg",alt:"CodeQL"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg",alt:"Dependency\nReview"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/license/airtai/fastkafka.png",alt:"GitHub"})),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-stay-in-touch-"},"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50"),(0,o.kt)("p",null,"Please show your support and stay in touch by:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"giving our ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/"},"GitHub repository")," a\nstar, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"joining our ",(0,o.kt)("a",{parentName:"p",href:"https://discord.gg/CJWmYpyFbc"},"Discord server"),"."))),(0,o.kt)("p",null,"Your support helps us to stay in touch with you and encourages us 
to\ncontinue developing and improving the library. Thank you for your\nsupport!"),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-we-were-busy-lately-"},"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg",alt:"Activity",title:"Repobeats analytics image"})),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on Windows, macOS, Linux, and most Unix-style operating\nsystems. You can install base version of FastKafka with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("p",null,"To install FastKafka with testing features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test]\n")),(0,o.kt)("p",null,"To install FastKafka with asyncapi docs please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[docs]\n")),(0,o.kt)("p",null,"To install FastKafka with all the features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test,docs]\n")),(0,o.kt)("h2",{id:"tutorial"},"Tutorial"),(0,o.kt)("p",null,"You can start an interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/index.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open in Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"To demonstrate FastKafka simplicity of using ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes"),"\ndecorators, we will focus 
on a simple app."),(0,o.kt)("p",null,"The app will consume jsons containig positive floats from one topic, log\nthem and then produce incremented values to another topic."),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines one ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," mesage class. This Class will model the\nconsumed and produced data in our app demo, it contains one\n",(0,o.kt)("inlineCode",{parentName:"p"},"NonNegativeFloat")," field ",(0,o.kt)("inlineCode",{parentName:"p"},"data"),' that will be logged and \u201cprocessed"\nbefore being produced to another topic.'),(0,o.kt)("p",null,"These message class will be used to parse and validate incoming data in\nKafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass Data(BaseModel):\n data: NonNegativeFloat = Field(\n ..., example=0.5, description="Float data example"\n )\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka 
brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("p",null,"We will also import and create a logger so that we can log the incoming\ndata in our consuming function."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from logging import getLogger\nfrom fastkafka import FastKafka\n\nlogger = getLogger("Demo Kafka app")\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," message class. Specifying the type of the\nsingle argument is instructing the Pydantic to use ",(0,o.kt)("inlineCode",{parentName:"p"},"Data.parse_raw()"),"\non the consumed message before passing it to the user defined function\n",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_output_data"),' function,\nwhich specifies that this function should produce a message to the\n\u201coutput_data" Kafka topic whenever it is called. The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_output_data"),"\nfunction takes a single float argument ",(0,o.kt)("inlineCode",{parentName:"p"},"data"),". 
It it increments the\ndata returns it wrapped in a ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," object. The framework will call\nthe ",(0,o.kt)("inlineCode",{parentName:"p"},'Data.json().encode("utf-8")')," function on the returned value and\nproduce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: Data):\n logger.info(f"Got data: {msg.data}")\n await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic="output_data")\nasync def to_output_data(data: float) -> Data:\n processed_data = Data(data=data+1.0)\n return processed_data\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts InMemory implementation of Kafka\nbroker."),(0,o.kt)("p",null,"The Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.testing import Tester\n\nmsg = Data(\n data=0.1,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send Data message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with incremented data in output_data topic\n await tester.awaited_mocks.on_output_data.assert_awaited_with(\n Data(data=1.1), timeout=2\n )\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: 
InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] Demo Kafka app: Got data: 0.1\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a simple FastKafka application. 
The app will consume the\n",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," from the ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic, log it and produce the incremented\ndata to ",(0,o.kt)("inlineCode",{parentName:"p"},"output_data")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purposes")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent Data message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed service has reacted to Data\nmessage"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin faskafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass Data(BaseModel):\n data: NonNegativeFloat = Field(\n ..., example=0.5, description="Float data example"\n )\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n 
kafka_brokers=kafka_brokers,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: Data):\n logger.info(f"Got data: {msg.data}")\n await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic="output_data")\nasync def to_output_data(data: float) -> Data:\n processed_data = Data(data=data+1.0)\n return processed_data\n')),(0,o.kt)("p",null,"To run the service, use the FastKafka CLI command and pass the module\n(in this case, the file where the app implementation is located) and the\napp simbol to the command."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see the following output in your\ncommand line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1504]: 23-05-31 11:36:45.956 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1506]: 23-05-31 11:36:46.042 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n[1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]\n[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]\n[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...\n23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...\n[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.\n23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"AsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs 
install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation programatically you just need to call the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.\n23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /content/asyncapi/docs.\n")),(0,o.kt)("p",null,"This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all your\ndocumentation will be saved. You can check out the content of it with:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxr-xr-x 4 root root 4096 May 31 11:38 docs\ndrwxr-xr-x 2 root root 4096 May 31 11:38 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: \'/content/asyncapi/spec/asyncapi.yml\'\n23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at \'asyncapi/docs\'\n23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of \'$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write\'\n\nDone! 
\u2728\nCheck out your shiny new generated files at /content/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n127.0.0.1 - - [31/May/2023 11:39:14] "GET / HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/global.min.css HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /js/asyncapi-ui.min.js HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/asyncapi.min.css HTTP/1.1" 200 -\nInterupting serving of documentation and cleaning up...\n')),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," 
are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})),(0,o.kt)("h2",{id:"license"},"License"),(0,o.kt)("p",null,"FastKafka is licensed under the Apache License 2.0"),(0,o.kt)("p",null,"A permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code."),(0,o.kt)("p",null,"The full text of the license can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE"},"here"),"."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4ace981f.70fbc63c.js b/assets/js/4ace981f.70fbc63c.js new file mode 100644 index 0000000..5b11dd2 --- /dev/null +++ b/assets/js/4ace981f.70fbc63c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6791],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>u});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function r(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?s(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function 
i(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},s=Object.keys(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),l=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof e?e(a):r(r({},a),e)),t},c=function(e){var a=l(e.components);return n.createElement(p.Provider,{value:a},e.children)},k="mdxType",f={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},d=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,s=e.originalType,p=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),k=l(t),d=o,u=k["".concat(p,".").concat(d)]||k[d]||f[d]||s;return t?n.createElement(u,r(r({ref:a},c),{},{components:t})):n.createElement(u,r({ref:a},c))}));function u(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=d;var i={};for(var p in a)hasOwnProperty.call(a,p)&&(i[p]=a[p]);i.originalType=e,i[k]="string"==typeof e?e:o,r[1]=i;for(var l=2;l<s;l++)r[l]=t[l];return n.createElement.apply(null,r)}return n.createElement.apply(null,t)}d.displayName="MDXCreateElement"},4071:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>f,frontMatter:()=>s,metadata:()=>i,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},r="FastKafka",i={unversionedId:"index",id:"version-0.6.0/index",title:"FastKafka",description:"Effortless Kafka integration for your web services",source:"@site/versioned_docs/version-0.6.0/index.md",sourceDirName:".",slug:"/",permalink:"/docs/0.6.0/",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",next:{title:"@consumes basics",permalink:"/docs/0.6.0/guides/Guide_11_Consumes_Basics"}},p={},l=[{value:"\u2b50\u2b50\u2b50 Stay in touch 
\u2b50\u2b50\u2b50",id:"-stay-in-touch-",level:4},{value:"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d",id:"-we-were-busy-lately-",level:4},{value:"Install",id:"install",level:2},{value:"Tutorial",id:"tutorial",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2},{value:"License",id:"license",level:2}],c={toc:l},k="wrapper";function f(e){let{components:a,...t}=e;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka"},"FastKafka"),(0,o.kt)("b",null,"Effortless Kafka integration for your web services"),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/v/fastkafka.png",alt:"PyPI"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/dm/fastkafka.png",alt:"PyPI -\nDownloads"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/pyversions/fastkafka.png",alt:"PyPI - Python\nVersion"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml",alt:"GitHub 
Workflow\nStatus"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg",alt:"CodeQL"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg",alt:"Dependency\nReview"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/license/airtai/fastkafka.png",alt:"GitHub"})),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-stay-in-touch-"},"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50"),(0,o.kt)("p",null,"Please show your support and stay in touch by:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"giving our ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/"},"GitHub repository")," a\nstar, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"joining our ",(0,o.kt)("a",{parentName:"p",href:"https://discord.gg/CJWmYpyFbc"},"Discord server"),"."))),(0,o.kt)("p",null,"Your support helps us to stay in touch with you and encourages us 
to\ncontinue developing and improving the library. Thank you for your\nsupport!"),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-we-were-busy-lately-"},"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg",alt:"Activity",title:"Repobeats analytics image"})),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install base version of ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka")," with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("p",null,"To install fastkafka with testing features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test]\n")),(0,o.kt)("p",null,"To install fastkafka with asyncapi docs please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[docs]\n")),(0,o.kt)("p",null,"To install fastkafka with all the features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test,docs]\n")),(0,o.kt)("h2",{id:"tutorial"},"Tutorial"),(0,o.kt)("p",null,"You can start an interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open In Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"Here is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive 
model, and outputs\nthe prediction to another Kafka topic."),(0,o.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,o.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model."),(0,o.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. 
It has four fields of type\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat"},(0,o.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. It has a single field ",(0,o.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,o.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,o.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,o.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts InMemory implementation of Kafka\nbroker."),(0,o.kt)("p",null,"The Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n# 
Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker 
stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a Iris classification model and encapulated it into our\nfastkafka application. The app will consume the IrisInputData from the\n",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,o.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purposes")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent IrisInputData message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to IrisInputData message"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin faskafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, 
Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("p",null,"To run the service, you will need a running Kafka broker on localhost as\nspecified in the ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")," parameter above. 
We can start the Kafka\nbroker locally using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),"."),(0,o.kt)("p",null,"To use\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),",\nyou need to install JRE and Kafka to your environment. To simplify this\nprocess, fastkafka comes with a CLI command that does just that, to run\nit, in your terminal execute the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka testing install_deps\n")),(0,o.kt)("p",null,"Now we can run\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),"\nthat will start a Kafka broker instance for us."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.testing import ApacheKafkaBroker\n\nbroker = ApacheKafkaBroker(apply_nest_asyncio=True)\n\nbroker.start()\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] 
fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n'127.0.0.1:9092'\n")),(0,o.kt)("p",null,"Then, we start the FastKafka service by running the following command in\nthe folder where the ",(0,o.kt)("inlineCode",{parentName:"p"},"application.py")," file is located:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n")),(0,o.kt)("p",null,"In the above command, we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--num-workers")," option to specify how many\nworkers to launch and we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--kafka-broker")," option to specify which\nkafka broker configuration to use from earlier specified ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[801767]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[801765]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[801767]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[801765]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[801765]: [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[801767]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[801767]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[801765]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[801765]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[801765]: [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n[801765]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[801767]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[801767]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[801767]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[801765]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[801767]: [ERROR] aiokafka: Unable to update metadata from [0]\n[801765]: [ERROR] aiokafka: Unable to update metadata from [0]\n^C\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801765...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801767...\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n")),(0,o.kt)("p",null,"You need to interupt running of the cell above by selecting\n",(0,o.kt)("inlineCode",{parentName:"p"},"Runtime->Interupt execution")," on the toolbar above."),(0,o.kt)("p",null,"Finally, we can stop the local Kafka Broker:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"broker.stop()\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 801303...\n[INFO] fastkafka._components._subprocess: 
terminate_asyncio_process(): Process 801303 was already terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 800930...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 800930 was already terminated.\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"AsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation programatically you just need to call the\nfolloving command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.\n")),(0,o.kt)("p",null,". This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all\nyour documentation will be saved. 
You can check out the content of it\nwith:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxrwxr-x 4 kumaran kumaran 4096 Mar 21 09:14 docs\ndrwxrwxr-x 2 kumaran kumaran 4096 Mar 21 09:14 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! 
\u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n^C\nInterupting serving of documentation and cleaning up...\n")),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," are\ndocumented as 
well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})),(0,o.kt)("h2",{id:"license"},"License"),(0,o.kt)("p",null,"FastKafka is licensed under the Apache License 2.0"),(0,o.kt)("p",null,"A permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code."),(0,o.kt)("p",null,"The full text of the license can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE"},"here"),"."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4c4d6ef6.8a27cfcd.js b/assets/js/4c4d6ef6.8a27cfcd.js new file mode 100644 index 0000000..2cc4c9a --- /dev/null +++ b/assets/js/4c4d6ef6.8a27cfcd.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2706],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var 
a,n,o=function(e,t){if(null==e)return{};var a,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},c=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=p(a),k=o,f=d["".concat(i,".").concat(k)]||d[k]||u[k]||r;return a?n.createElement(f,s(s({ref:t},c),{},{components:a})):n.createElement(f,s({ref:t},c))}));function f(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,s=new Array(r);s[0]=k;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[d]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},2063:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var n=a(7462),o=(a(7294),a(3905));const r={},s="@produces basics",l={unversionedId:"guides/Guide_21_Produces_Basics",id:"version-0.7.1/guides/Guide_21_Produces_Basics",title:"@produces basics",description:"You can use @produces decorator to produce messages to Kafka 
topics.",source:"@site/versioned_docs/version-0.7.1/guides/Guide_21_Produces_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_21_Produces_Basics",permalink:"/docs/0.7.1/guides/Guide_21_Produces_Basics",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Batch consuming",permalink:"/docs/0.7.1/guides/Guide_12_Batch_Consuming"},next:{title:"Defining a partition key",permalink:"/docs/0.7.1/guides/Guide_22_Partition_Keys"}},i={},p=[{value:"Import <code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a producer function and decorate it with <code>@produces</code>",id:"create-a-producer-function-and-decorate-it-with-produces",level:2},{value:"Instruct the app to start sending HelloWorld messages",id:"instruct-the-app-to-start-sending-helloworld-messages",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic",id:"check-if-the-message-was-sent-to-the-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"produces-basics"},"@produces basics"),(0,o.kt)("p",null,"You can use ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator to produce messages to Kafka topics."),(0,o.kt)("p",null,"In this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic."),(0,o.kt)("h2",{id:"import-fastkafka"},"Import ",(0,o.kt)("a",{parentName:"h2",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,o.kt)("p",null,"To use the 
",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator, frist we need to import the base\nFastKafka app to create our application."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,o.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,o.kt)("p",null,"Next, you need to define the structure of the messages you want to send\nto the topic using ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". For the guide\nwe\u2019ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded."),(0,o.kt)("p",null,"Let\u2019s import ",(0,o.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,o.kt)("inlineCode",{parentName:"p"},"msg")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,o.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,o.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,o.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap server"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo 
kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,o.kt)("h2",{id:"create-a-producer-function-and-decorate-it-with-produces"},"Create a producer function and decorate it with ",(0,o.kt)("inlineCode",{parentName:"h2"},"@produces")),(0,o.kt)("p",null,"Let\u2019s create a producer function that will produce ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nto ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n")),(0,o.kt)("p",null,"Now you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic."),(0,o.kt)("p",null,"By default, the topic is determined from your function name, the \u201cto","_",'"\nprefix is stripped and what is left over is used as a topic name. 
I this\ncase, that is ',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("h2",{id:"instruct-the-app-to-start-sending-helloworld-messages"},"Instruct the app to start sending HelloWorld messages"),(0,o.kt)("p",null,"Let\u2019s use ",(0,o.kt)("inlineCode",{parentName:"p"},"@run_in_background")," decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"Your app code should look like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"run-the-app"},"Run the app"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'script_file = "producer_example.py"\ncmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"\nmd(\n f"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```"\n)\n')),(0,o.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n")),(0,o.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n")),(0,o.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic"},"Check if the message was sent to the Kafka topic"),(0,o.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic. 
In your terminal run:'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh -topic=hello_world --from-beginning -bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n")),(0,o.kt)("p",null,'You should see the {\u201cmsg": \u201cHello world!"} messages in your topic.'),(0,o.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,o.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are sending\nthe message to, this is because the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default \u201cto","_",'" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("p",null,'!!! warn "New topics"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Kafka producers and application startup will fail if the topics you are producing to don't yet exist. 
Before running the app, make sure that the topics are created.\n")),(0,o.kt)("p",null,"You can choose your custom prefix by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nproduces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(prefix="send_to_")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in produces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(topic="my_special_topic")\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("h2",{id:"message-data"},"Message data"),(0,o.kt)("p",null,"The return value from your function will be translated JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app."),(0,o.kt)("p",null,"In this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. 
In the from of:\n",(0,o.kt)("inlineCode",{parentName:"p"},'b\'{"msg": "Hello world!"}\'')))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4d11873e.e7adb799.js b/assets/js/4d11873e.e7adb799.js new file mode 100644 index 0000000..b9a3efe --- /dev/null +++ b/assets/js/4d11873e.e7adb799.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[904],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>k});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){i(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function r(e,t){if(null==e)return{};var n,a,i=function(e,t){if(null==e)return{};var n,a,i={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var s=a.createContext({}),m=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=m(e.components);return a.createElement(s.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var 
n=e.components,i=e.mdxType,o=e.originalType,s=e.parentName,p=r(e,["components","mdxType","originalType","parentName"]),d=m(n),u=i,k=d["".concat(s,".").concat(u)]||d[u]||c[u]||o;return n?a.createElement(k,l(l({ref:t},p),{},{components:n})):a.createElement(k,l({ref:t},p))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,l=new Array(o);l[0]=u;var r={};for(var s in t)hasOwnProperty.call(t,s)&&(r[s]=t[s]);r.originalType=e,r[d]="string"==typeof e?e:i,l[1]=r;for(var m=2;m<o;m++)l[m]=n[m];return a.createElement.apply(null,l)}return a.createElement.apply(null,n)}u.displayName="MDXCreateElement"},5732:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>c,frontMatter:()=>o,metadata:()=>r,toc:()=>m});var a=n(7462),i=(n(7294),n(3905));const o={},l=void 0,r={unversionedId:"api/fastkafka/FastKafka",id:"version-0.7.0/api/fastkafka/FastKafka",title:"FastKafka",description:"fastkafka.FastKafka {fastkafka.FastKafka}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/FastKafka.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/",permalink:"/docs/0.7.0/api/fastkafka/",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"EventMetadata",permalink:"/docs/0.7.0/api/fastkafka/EventMetadata"},next:{title:"KafkaEvent",permalink:"/docs/0.7.0/api/fastkafka/KafkaEvent"}},s={},m=[{value:"<code>fastkafka.FastKafka</code>",id:"fastkafka.FastKafka",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>benchmark</code>",id:"benchmark",level:3},{value:"<code>consumes</code>",id:"consumes",level:3},{value:"<code>create_docs</code>",id:"create_docs",level:3},{value:"<code>create_mocks</code>",id:"create_mocks",level:3},{value:"<code>fastapi_lifespan</code>",id:"fastapi_lifespan",level:3},{value:"<code>get_topics</code>",id:"get_topics",level:3},{value:"<code>produces</code>",id:"produces",level:3},{value:"<code>run_in_background</code>",id:"run_in_background",level:3},{value:"<cod
e>set_kafka_broker</code>",id:"set_kafka_broker",level:3}],p={toc:m},d="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(d,(0,a.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"h2"},"fastkafka.FastKafka")),(0,i.kt)("h3",{id:"init"},(0,i.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Optional[Dict[str, Any]] = None, root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7f1c4d890f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f1c4c8a1210>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, 
heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None")),(0,i.kt)("p",null,"Creates FastKafka application"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"title"),": optional title for the documentation. If None,\nthe title will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": optional description for the documentation. If\nNone, the description will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"version"),": optional version for the documentation. If None,\nthe version will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"contact"),": optional contact for the documentation. If None, the\ncontact will be set to placeholder values:\nname='Author' url=HttpUrl(' ",(0,i.kt)("a",{parentName:"li",href:"https://www.google.com"},"https://www.google.com")," ', ) email='",(0,i.kt)("a",{parentName:"li",href:"mailto:noreply@gmail.com"},"noreply@gmail.com"),"'"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),': dictionary describing kafka brokers used for setting\nthe bootstrap server when running the applicationa and for\ngenerating documentation. 
Defaults to\n{\n"localhost": {\n"url": "localhost",\n"description": "local kafka broker",\n"port": "9092",\n}\n}'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"root_path"),": path to where documentation will be created"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"lifespan"),": asynccontextmanager that is used for setting lifespan hooks.\n",(0,i.kt)("strong",{parentName:"li"},"aenter")," is called before app start and ",(0,i.kt)("strong",{parentName:"li"},"aexit")," after app stop.\nThe lifespan is called whe application is started as async context\nmanager, e.g.:",(0,i.kt)("inlineCode",{parentName:"li"},"async with kafka_app...")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. 
No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). 
Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. (See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. 
The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. 
This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. 
Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. 
If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("h3",{id:"benchmark"},(0,i.kt)("inlineCode",{parentName:"h3"},"benchmark")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]")),(0,i.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"interval"),": Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sliding_window_size"),": The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated")),(0,i.kt)("h3",{id:"consumes"},(0,i.kt)("inlineCode",{parentName:"h3"},"consumes")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], 
typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"decoder"),": Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. 
It also accepts custom decoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"executor"),': Type of executor to choose for consuming tasks. Avaliable options\nare "SequentialExecutor" and "DynamicTaskExecutor". The default option is\n"SequentialExecutor" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n"DynamicTaskExecutor" which will wrap the consuming functions into tasks\nand run them in on asyncio loop in background. This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: "on_". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"brokers"),": Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": Optional description of the consuming function async docs.\nIf not provided, consuming function ",(0,i.kt)("strong",{parentName:"li"},"doc")," attr will be used."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. 
If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string (or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings) that the consumer should contact to bootstrap\ninitial cluster metadata.")),(0,i.kt)("p",null,"This does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ",(0,i.kt)("inlineCode",{parentName:"p"},"localhost:9092"),"."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~.consumer.group_coordinator.GroupCoordinator"),"\nfor logging with respect to consumer group administration. Default:\n",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-{version}")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. 
The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Client request timeout in milliseconds.\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. 
Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. 
It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more information see\n:ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),". Default: None."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying ",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. 
Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Valid values are:\n",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("h3",{id:"create_docs"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_docs")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_docs(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Create the asyncapi documentation based on the configured consumers and producers."),(0,i.kt)("p",null,"This function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers."),(0,i.kt)("p",null,"Note:\nThe asyncapi documentation is saved to the location specified by the ",(0,i.kt)("inlineCode",{parentName:"p"},"_asyncapi_path"),"\nattribute of the FastKafka instance."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"None")),(0,i.kt)("h3",{id:"create_mocks"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_mocks")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_mocks(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,i.kt)("h3",{id:"fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"h3"},"fastapi_lifespan")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]")),(0,i.kt)("p",null,"Method for managing the lifespan of a FastAPI application with a specific Kafka 
broker."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_broker_name"),": The name of the Kafka broker to start FastKafka")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"Lifespan function to use for initializing FastAPI")),(0,i.kt)("h3",{id:"get_topics"},(0,i.kt)("inlineCode",{parentName:"h3"},"get_topics")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]")),(0,i.kt)("p",null,"Get all topics for both producing and consuming."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A set of topics for both producing and consuming.")),(0,i.kt)("h3",{id:"produces"},(0,i.kt)("inlineCode",{parentName:"h3"},"produces")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7f1c4d890f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f1c4c8a1210>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, 
sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when delivery report for a produced message is received"),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encoder"),": Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. 
It also accepts custom encoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: "to_". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"brokers"),": Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": Optional description of the producing function async docs.\nIf not provided, producing function ",(0,i.kt)("strong",{parentName:"li"},"doc")," attr will be used."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. 
No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). 
Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. 
(See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"))),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": when needed")),(0,i.kt)("h3",{id:"run_in_background"},(0,i.kt)("inlineCode",{parentName:"h3"},"run_in_background")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]")),(0,i.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,i.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,i.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is triggered."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A decorator function that takes a background task as an input and stores it to be run in the backround.")),(0,i.kt)("h3",{id:"set_kafka_broker"},(0,i.kt)("inlineCode",{parentName:"h3"},"set_kafka_broker")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def set_kafka_broker(self, kafka_broker_name: str) -> None")),(0,i.kt)("p",null,"Sets the Kafka broker to start FastKafka with"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_broker_name"),": The name of the Kafka broker to start 
FastKafka")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": If the provided kafka_broker_name is not found in dictionary of kafka_brokers")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4d517c40.eadb0bc3.js b/assets/js/4d517c40.eadb0bc3.js new file mode 100644 index 0000000..534a32f --- /dev/null +++ b/assets/js/4d517c40.eadb0bc3.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1195],{3905:(e,t,a)=>{a.d(t,{Zo:()=>m,kt:()=>N});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function o(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var d=n.createContext({}),s=function(e){var t=n.useContext(d),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},m=function(e){var 
t=s(e.components);return n.createElement(d.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,i=e.originalType,d=e.parentName,m=o(e,["components","mdxType","originalType","parentName"]),p=s(a),u=r,N=p["".concat(d,".").concat(u)]||p[u]||k[u]||i;return a?n.createElement(N,l(l({ref:t},m),{},{components:a})):n.createElement(N,l({ref:t},m))}));function N(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var i=a.length,l=new Array(i);l[0]=u;var o={};for(var d in t)hasOwnProperty.call(t,d)&&(o[d]=t[d]);o.originalType=e,o[p]="string"==typeof e?e:r,l[1]=o;for(var s=2;s<i;s++)l[s]=a[s];return n.createElement.apply(null,l)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},728:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>d,contentTitle:()=>l,default:()=>k,frontMatter:()=>i,metadata:()=>o,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const i={},l=void 0,o={unversionedId:"api/fastkafka/FastKafka",id:"api/fastkafka/FastKafka",title:"FastKafka",description:"fastkafka.FastKafka 
{fastkafka.FastKafka}",source:"@site/docs/api/fastkafka/FastKafka.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/",permalink:"/docs/next/api/fastkafka/",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"EventMetadata",permalink:"/docs/next/api/fastkafka/EventMetadata"},next:{title:"KafkaEvent",permalink:"/docs/next/api/fastkafka/KafkaEvent"}},d={},s=[{value:"fastkafka.FastKafka",id:"fastkafka.FastKafka",level:2},{value:"<strong>init</strong>",id:"fastkafka._application.app.FastKafka.init",level:3},{value:"benchmark",id:"fastkafka._application.app.FastKafka.benchmark",level:3},{value:"consumes",id:"fastkafka._application.app.FastKafka.consumes",level:3},{value:"create_docs",id:"fastkafka._application.app.FastKafka.create_docs",level:3},{value:"create_mocks",id:"fastkafka._application.app.FastKafka.create_mocks",level:3},{value:"fastapi_lifespan",id:"fastkafka._application.app.FastKafka.fastapi_lifespan",level:3},{value:"get_topics",id:"fastkafka._application.app.FastKafka.get_topics",level:3},{value:"is_started",id:"fastkafka._application.app.FastKafka.is_started",level:3},{value:"produces",id:"fastkafka._application.app.FastKafka.produces",level:3},{value:"run_in_background",id:"fastkafka._application.app.FastKafka.run_in_background",level:3},{value:"set_kafka_broker",id:"fastkafka._application.app.FastKafka.set_kafka_broker",level:3}],m={toc:s},p="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.FastKafka"},"fastkafka.FastKafka"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L178-L428",class:"link-to-source",target:"_blank"},"View 
source"),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L180-L306",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self,\n title=None,\n description=None,\n version=None,\n contact=None,\n kafka_brokers=None,\n root_path=None,\n lifespan=None,\n bootstrap_servers_id='localhost',\n loop=None,\n client_id=None,\n metadata_max_age_ms=300000,\n request_timeout_ms=40000,\n api_version='auto',\n acks=<object object at 0x7f21fc189d70>,\n key_serializer=None,\n value_serializer=None,\n compression_type=None,\n max_batch_size=16384,\n partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>,\n max_request_size=1048576,\n linger_ms=0,\n send_backoff_ms=100,\n retry_backoff_ms=100,\n security_protocol='PLAINTEXT',\n ssl_context=None,\n connections_max_idle_ms=540000,\n enable_idempotence=False,\n transactional_id=None,\n transaction_timeout_ms=60000,\n sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n group_id=None,\n key_deserializer=None,\n value_deserializer=None,\n fetch_max_wait_ms=500,\n fetch_max_bytes=52428800,\n fetch_min_bytes=1,\n max_partition_fetch_bytes=1048576,\n auto_offset_reset='latest',\n enable_auto_commit=True,\n auto_commit_interval_ms=5000,\n check_crcs=True,\n partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),\n max_poll_interval_ms=300000,\n rebalance_timeout_ms=None,\n session_timeout_ms=10000,\n heartbeat_interval_ms=3000,\n consumer_timeout_ms=200,\n max_poll_records=None,\n exclude_internal_topics=True,\n isolation_level='read_uncommitted',\n)\n")),(0,r.kt)("p",null,"Creates 
FastKafka application"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"title")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"optional title for the documentation. If None,the title will be set to empty string"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"optional description for the documentation. IfNone, the description will be set to empty string"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"version")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"optional version for the documentation. 
If None,the version will be set to empty string"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"contact")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[Dict[str, str]]")),(0,r.kt)("td",{parentName:"tr",align:null},"optional contact for the documentation. If None, thecontact will be set to placeholder values:name='Author' url=HttpUrl(' ",(0,r.kt)("a",{parentName:"td",href:"https://www.google.com"},"https://www.google.com")," ', ) email='",(0,r.kt)("a",{parentName:"td",href:"mailto:noreply@gmail.com"},"noreply@gmail.com"),"'"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[Dict[str, Any]]")),(0,r.kt)("td",{parentName:"tr",align:null},'dictionary describing kafka brokers used for settingthe bootstrap server when running the applicationa and forgenerating documentation. 
Defaults to { "localhost": { "url": "localhost", "description": "local kafka broker", "port": "9092", } }'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"root_path")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[pathlib.Path, str, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"path to where documentation will be created"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"lifespan")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]]")),(0,r.kt)("td",{parentName:"tr",align:null},"asynccontextmanager that is used for setting lifespan hooks.",(0,r.kt)("strong",{parentName:"td"},"aenter")," is called before app start and ",(0,r.kt)("strong",{parentName:"td"},"aexit")," after app stop.The lifespan is called whe application is started as async contextmanager, e.g.:",(0,r.kt)("inlineCode",{parentName:"td"},"async with kafka_app...")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. 
This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-producer-#")," (appended with a unique numberper instance)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied keys to bytesIf not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as ",(0,r.kt)("inlineCode",{parentName:"td"},"f(key),")," should return:class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied messagevalues to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),". 
If not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as",(0,r.kt)("inlineCode",{parentName:"td"},"f(value)"),", should return :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"acks")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"one of ",(0,r.kt)("inlineCode",{parentName:"td"},"0"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common:",(0,r.kt)("em",{parentName:"td"}," ",(0,r.kt)("inlineCode",{parentName:"em"},"0"),": Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1.")," ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),": The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),": The broker leader will wait for the full set of in-sync replicas to acknowledge the record. 
This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=1"),". If ",(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")," is:data:",(0,r.kt)("inlineCode",{parentName:"td"},"True")," defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=all")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<object object at 0x7f21fc189d70>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"compression_type")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The compression type for all data generated bythe producer. Valid values are ",(0,r.kt)("inlineCode",{parentName:"td"},"gzip"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"snappy"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"lz4"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"zstd"),"or :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),".Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). 
Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_batch_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum size of buffered data per partition.After this amount :meth:",(0,r.kt)("inlineCode",{parentName:"td"},"send")," coroutine will block until batch isdrained.Default: 16384"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"16384"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms"),", producer will wait ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms - process_time"),".Default: 0 (i.e. no delay)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"0"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partitioner")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Callable used to determine which partitioneach message is assigned to. 
Called (after key serialization):",(0,r.kt)("inlineCode",{parentName:"td"},"partitioner(key_bytes, all_partitions, available_partitions)"),".The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", the message is delivered to a random partition(filtered to partitions with available leaders only, if possible)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_request_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. 
Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Produce request timeout in milliseconds.As it's sent as part of:class:",(0,r.kt)("inlineCode",{parentName:"td"},"~kafka.protocol.produce.ProduceRequest")," (it's a blockingcall), maximum waiting time can be up to ",(0,r.kt)("inlineCode",{parentName:"td"},"2 *request_timeout_ms"),".Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For moreinformation see :ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"When set to :data:",(0,r.kt)("inlineCode",{parentName:"td"},"True"),", the producer willensure that exactly one copy of each message is written in thestream. If :data:",(0,r.kt)("inlineCode",{parentName:"td"},"False"),", producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:",(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")," will be thrown.New in version 0.5.0."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Valid valuesare: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"group_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"name of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. 
If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message key and returns a deserialized key."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message value and returns a deserialized value."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_min_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Minimum amount of data the server shouldreturn for a fetch request, otherwise wait up to",(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of data the server shouldreturn for a fetch request. 
This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"52428800"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"500"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_partition_fetch_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of dataper-partition the server will return. The maximum total memoryused for a request ",(0,r.kt)("inlineCode",{parentName:"td"},"= #partitions * max_partition_fetch_bytes"),".This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. 
Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_records")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum number of records returned in asingle call to :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),". Defaults ",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", no limit."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_offset_reset")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A policy for resetting offsets on:exc:",(0,r.kt)("inlineCode",{parentName:"td"},".OffsetOutOfRangeError")," errors: ",(0,r.kt)("inlineCode",{parentName:"td"},"earliest")," will move to the oldestavailable message, ",(0,r.kt)("inlineCode",{parentName:"td"},"latest")," will move to the most recent, and",(0,r.kt)("inlineCode",{parentName:"td"},"none")," will raise an exception so you can handle this case.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"latest"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'latest'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_auto_commit")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"If true the consumer's offset will beperiodically committed in the background. 
Default: True."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_commit_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"milliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"5000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"check_crcs")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Automatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partition_assignment_strategy")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"List of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. 
Default: ","[:class:",(0,r.kt)("inlineCode",{parentName:"td"},".RoundRobinPartitionAssignor"),"]"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum allowed time between calls toconsume messages (e.g., :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),"). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See ",(0,r.kt)("inlineCode",{parentName:"td"},"KIP-62"),"_ for moreinformation. Default 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"rebalance_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to ",(0,r.kt)("inlineCode",{parentName:"td"},"max.poll.interval.ms")," configuration,but as ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka")," will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:",(0,r.kt)("inlineCode",{parentName:"td"},".ConsumerRebalanceListener")," to delay rebalacing. 
Defaultsto ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client group session and failure detectiontimeout. The consumer sends periodic heartbeats(",(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.If no hearts are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe ",(0,r.kt)("strong",{parentName:"td"},"broker")," configuration properties",(0,r.kt)("inlineCode",{parentName:"td"},"group.min.session.timeout.ms")," and ",(0,r.kt)("inlineCode",{parentName:"td"},"group.max.session.timeout.ms"),".Default: 10000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"10000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms"),", but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. 
Default: 3000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"3000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"consumer_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"maximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"200"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"exclude_internal_topics")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Whether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"isolation_level")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Controls how to read messages writtentransactionally.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed"),", :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returntransactional messages which have been committed.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")," (the default), :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. 
Hence, in",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," mode, :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," the seek_to_end method willreturn the LSO. See method docs below. Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'read_uncommitted'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_oauth_token_provider")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"OAuthBearer token provider instance. 
(See :mod:",(0,r.kt)("inlineCode",{parentName:"td"},"kafka.oauth.abstract"),").Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.benchmark"},"benchmark"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L1113-L1164",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"benchmark(\n self, interval=1, sliding_window_size=None\n)\n")),(0,r.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"interval")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[int, datetime.timedelta]")),(0,r.kt)("td",{parentName:"tr",align:null},"Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. default: 1 - one second"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sliding_window_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[int]")),(0,r.kt)("td",{parentName:"tr",align:null},"The size of the sliding window to use to calculateaverage throughput. 
default: None - By default average throughput isnot calculated"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.consumes"},"consumes"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L475-L560",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"consumes(\n self,\n topic=None,\n decoder='json',\n executor=None,\n brokers=None,\n prefix='on_',\n description=None,\n loop=None,\n bootstrap_servers='localhost',\n client_id='aiokafka-0.8.1',\n group_id=None,\n key_deserializer=None,\n value_deserializer=None,\n fetch_max_wait_ms=500,\n fetch_max_bytes=52428800,\n fetch_min_bytes=1,\n max_partition_fetch_bytes=1048576,\n request_timeout_ms=40000,\n retry_backoff_ms=100,\n auto_offset_reset='latest',\n enable_auto_commit=True,\n auto_commit_interval_ms=5000,\n check_crcs=True,\n metadata_max_age_ms=300000,\n partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),\n max_poll_interval_ms=300000,\n rebalance_timeout_ms=None,\n session_timeout_ms=10000,\n heartbeat_interval_ms=3000,\n consumer_timeout_ms=200,\n max_poll_records=None,\n ssl_context=None,\n security_protocol='PLAINTEXT',\n api_version='auto',\n exclude_internal_topics=True,\n connections_max_idle_ms=540000,\n isolation_level='read_uncommitted',\n sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n)\n")),(0,r.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,r.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and 
documentation."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Kafka topic that the consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefix"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"decoder")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. 
It also accepts custom decoder function."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'json'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"executor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},'Type of executor to choose for consuming tasks. Avaliable optionsare "SequentialExecutor" and "DynamicTaskExecutor". The default option is"SequentialExecutor" which will execute the consuming tasks sequentially.If the consuming tasks have high latency it is recommended to use"DynamicTaskExecutor" which will wrap the consuming functions into tasksand run them in on asyncio loop in background. This comes with a cost ofincreased overhead so use it only in cases when your consume functions havehigh latency such as database queries or some other type of networking.'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"prefix")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'Prefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: "on_". 
If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'on_'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional description of the consuming function async docs.If not provided, consuming function ",(0,r.kt)("strong",{parentName:"td"},"doc")," attr will be used."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a ",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," string (or list of",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. 
If no servers arespecified, will default to ",(0,r.kt)("inlineCode",{parentName:"td"},"localhost:9092"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'localhost'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~.consumer.group_coordinator.GroupCoordinator"),"for logging with respect to consumer group administration. Default:",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-{version}")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'aiokafka-0.8.1'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"group_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"name of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. 
If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message key and returns a deserialized key."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message value and returns a deserialized value."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_min_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Minimum amount of data the server shouldreturn for a fetch request, otherwise wait up to",(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of data the server shouldreturn for a fetch request. 
This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"52428800"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"500"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_partition_fetch_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of dataper-partition the server will return. The maximum total memoryused for a request ",(0,r.kt)("inlineCode",{parentName:"td"},"= #partitions * max_partition_fetch_bytes"),".This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. 
Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_records")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum number of records returned in asingle call to :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),". Defaults ",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", no limit."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client request timeout in milliseconds.Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. 
Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_offset_reset")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A policy for resetting offsets on:exc:",(0,r.kt)("inlineCode",{parentName:"td"},".OffsetOutOfRangeError")," errors: ",(0,r.kt)("inlineCode",{parentName:"td"},"earliest")," will move to the oldestavailable message, ",(0,r.kt)("inlineCode",{parentName:"td"},"latest")," will move to the most recent, and",(0,r.kt)("inlineCode",{parentName:"td"},"none")," will raise an exception so you can handle this case.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"latest"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'latest'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_auto_commit")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"If true the consumer's offset will beperiodically committed in the background. Default: True."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_commit_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"milliseconds between automaticoffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"5000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"check_crcs")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Automatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partition_assignment_strategy")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"List of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. 
Default: ","[:class:",(0,r.kt)("inlineCode",{parentName:"td"},".RoundRobinPartitionAssignor"),"]"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum allowed time between calls toconsume messages (e.g., :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),"). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See ",(0,r.kt)("inlineCode",{parentName:"td"},"KIP-62"),"_ for moreinformation. Default 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"rebalance_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to ",(0,r.kt)("inlineCode",{parentName:"td"},"max.poll.interval.ms")," configuration,but as ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka")," will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:",(0,r.kt)("inlineCode",{parentName:"td"},".ConsumerRebalanceListener")," to delay rebalacing. 
Defaultsto ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client group session and failure detectiontimeout. The consumer sends periodic heartbeats(",(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.If no hearts are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe ",(0,r.kt)("strong",{parentName:"td"},"broker")," configuration properties",(0,r.kt)("inlineCode",{parentName:"td"},"group.min.session.timeout.ms")," and ",(0,r.kt)("inlineCode",{parentName:"td"},"group.max.session.timeout.ms"),".Default: 10000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"10000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms"),", but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. 
Default: 3000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"3000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"consumer_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"maximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"200"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.:class:",(0,r.kt)("inlineCode",{parentName:"td"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For more information see:ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),". Default: None."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"exclude_internal_topics")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Whether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying ",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"isolation_level")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Controls how to read messages writtentransactionally.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed"),", :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returntransactional messages which have been committed.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")," (the default), :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, in",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," mode, :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," the seek_to_end method willreturn the LSO. See method docs below. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'read_uncommitted'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Valid values are:",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: 
None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_oauth_token_provider")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"OAuthBearer token provider instance. (See :mod:",(0,r.kt)("inlineCode",{parentName:"td"},"kafka.oauth.abstract"),").Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], 
None]]]")),(0,r.kt)("td",{parentName:"tr",align:null},": A function returning the same function")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.create_docs"},"create_docs"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L943-L969",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"create_docs(\n self\n)\n")),(0,r.kt)("p",null,"Create the asyncapi documentation based on the configured consumers and producers."),(0,r.kt)("p",null,"This function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers."),(0,r.kt)("p",null,"Note:\nThe asyncapi documentation is saved to the location specified by the ",(0,r.kt)("inlineCode",{parentName:"p"},"_asyncapi_path"),"\nattribute of the FastKafka instance."),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.create_mocks"},"create_mocks"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L1031-L1109",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"create_mocks(\n self\n)\n")),(0,r.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.fastapi_lifespan"},"fastapi_lifespan"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L1168-L1187",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"fastapi_lifespan(\n self, kafka_broker_name\n)\n")),(0,r.kt)("p",null,"Method for managing the lifespan of a FastAPI application with a specific Kafka 
broker."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_broker_name")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The name of the Kafka broker to start FastKafka"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Lifespan function to use for initializing FastAPI")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.get_topics"},"get_topics"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L668-L677",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"get_topics(\n self\n)\n")),(0,r.kt)("p",null,"Get all topics for both producing and 
consuming."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Iterable[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"A set of topics for both producing and consuming.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.is_started"},"is_started"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L309-L320",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"@property\nis_started(\n self\n)\n")),(0,r.kt)("p",null,"Property indicating whether the FastKafka object is started."),(0,r.kt)("p",null,"The is_started property indicates if the FastKafka object is currently\nin a started state. 
This implies that all background tasks, producers,\nand consumers have been initiated, and the object is successfully connected\nto the Kafka broker."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"True if the object is started, False otherwise.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.produces"},"produces"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L585-L664",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"produces(\n self,\n topic=None,\n encoder='json',\n prefix='to_',\n brokers=None,\n description=None,\n loop=None,\n bootstrap_servers='localhost',\n client_id=None,\n metadata_max_age_ms=300000,\n request_timeout_ms=40000,\n api_version='auto',\n acks=<object object at 0x7f21fc189d70>,\n key_serializer=None,\n value_serializer=None,\n compression_type=None,\n max_batch_size=16384,\n partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>,\n max_request_size=1048576,\n linger_ms=0,\n send_backoff_ms=100,\n retry_backoff_ms=100,\n security_protocol='PLAINTEXT',\n ssl_context=None,\n connections_max_idle_ms=540000,\n enable_idempotence=False,\n transactional_id=None,\n transaction_timeout_ms=60000,\n sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n)\n")),(0,r.kt)("p",null,"Decorator registering the callback 
called when delivery report for a produced message is received"),(0,r.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"encoder")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, Callable[[pydantic.main.BaseModel], bytes]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. 
It also accepts custom encoder function."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'json'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"prefix")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'Prefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: "to_". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'to_'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional description of the producing function async docs.If not provided, producing function ",(0,r.kt)("strong",{parentName:"td"},"doc")," attr will be 
used."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a ",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," string or list of",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list. It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to ",(0,r.kt)("inlineCode",{parentName:"td"},"localhost:9092"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'localhost'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. 
This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-producer-#")," (appended with a unique numberper instance)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied keys to bytesIf not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as ",(0,r.kt)("inlineCode",{parentName:"td"},"f(key),")," should return:class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied messagevalues to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),". 
If not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as",(0,r.kt)("inlineCode",{parentName:"td"},"f(value)"),", should return :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"acks")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"one of ",(0,r.kt)("inlineCode",{parentName:"td"},"0"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common:",(0,r.kt)("em",{parentName:"td"}," ",(0,r.kt)("inlineCode",{parentName:"em"},"0"),": Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1.")," ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),": The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),": The broker leader will wait for the full set of in-sync replicas to acknowledge the record. 
This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=1"),". If ",(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")," is:data:",(0,r.kt)("inlineCode",{parentName:"td"},"True")," defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=all")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<object object at 0x7f21fc189d70>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"compression_type")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The compression type for all data generated bythe producer. Valid values are ",(0,r.kt)("inlineCode",{parentName:"td"},"gzip"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"snappy"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"lz4"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"zstd"),"or :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),".Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). 
Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_batch_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum size of buffered data per partition.After this amount :meth:",(0,r.kt)("inlineCode",{parentName:"td"},"send")," coroutine will block until batch isdrained.Default: 16384"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"16384"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms"),", producer will wait ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms - process_time"),".Default: 0 (i.e. no delay)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"0"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partitioner")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Callable used to determine which partitioneach message is assigned to. 
Called (after key serialization):",(0,r.kt)("inlineCode",{parentName:"td"},"partitioner(key_bytes, all_partitions, available_partitions)"),".The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", the message is delivered to a random partition(filtered to partitions with available leaders only, if possible)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_request_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. 
Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Produce request timeout in milliseconds.As it's sent as part of:class:",(0,r.kt)("inlineCode",{parentName:"td"},"~kafka.protocol.produce.ProduceRequest")," (it's a blockingcall), maximum waiting time can be up to ",(0,r.kt)("inlineCode",{parentName:"td"},"2 *request_timeout_ms"),".Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For moreinformation see :ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"When set to :data:",(0,r.kt)("inlineCode",{parentName:"td"},"True"),", the producer willensure that exactly one copy of each message is written in thestream. If :data:",(0,r.kt)("inlineCode",{parentName:"td"},"False"),", producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:",(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")," will be thrown.New in version 0.5.0."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Valid valuesare: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], 
fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]")),(0,r.kt)("td",{parentName:"tr",align:null},": A function returning the same function")))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,r.kt)("td",{parentName:"tr",align:null},"when needed")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.run_in_background"},"run_in_background"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L681-L714",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"run_in_background(\n self\n)\n")),(0,r.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,r.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,r.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is 
triggered."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]")),(0,r.kt)("td",{parentName:"tr",align:null},"A decorator function that takes a background task as an input and stores it to be run in the backround.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.set_kafka_broker"},"set_kafka_broker"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_application/app.py#L322-L338",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"set_kafka_broker(\n self, kafka_broker_name\n)\n")),(0,r.kt)("p",null,"Sets the Kafka broker to start FastKafka with"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_broker_name")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The name of the Kafka broker to start 
FastKafka"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,r.kt)("td",{parentName:"tr",align:null},"If the provided kafka_broker_name is not found in dictionary of kafka_brokers")))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4e5074e6.b0d53ffd.js b/assets/js/4e5074e6.b0d53ffd.js new file mode 100644 index 0000000..025ebd5 --- /dev/null +++ b/assets/js/4e5074e6.b0d53ffd.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1312],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var 
o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},d=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(a),u=r,f=c["".concat(s,".").concat(u)]||c[u]||k[u]||o;return a?n.createElement(f,i(i({ref:t},d),{},{components:a})):n.createElement(f,i({ref:t},d))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:r,i[1]=l;for(var p=2;p<o;p++)i[p]=a[p];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},5681:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>k,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},i=void 0,l={unversionedId:"api/fastkafka/testing/LocalRedpandaBroker",id:"version-0.5.0/api/fastkafka/testing/LocalRedpandaBroker",title:"LocalRedpandaBroker",description:"fastkafka.testing.LocalRedpandaBroker 
{fastkafka.testing.LocalRedpandaBroker}",source:"@site/versioned_docs/version-0.5.0/api/fastkafka/testing/LocalRedpandaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/LocalRedpandaBroker",permalink:"/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"ApacheKafkaBroker",permalink:"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker"},next:{title:"Tester",permalink:"/docs/0.5.0/api/fastkafka/testing/Tester"}},s={},p=[{value:"<code>fastkafka.testing.LocalRedpandaBroker</code>",id:"fastkafka.testing.LocalRedpandaBroker",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>get_service_config_string</code>",id:"get_service_config_string",level:3},{value:"<code>start</code>",id:"start",level:3},{value:"<code>stop</code>",id:"stop",level:3}],d={toc:p},c="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.testing.LocalRedpandaBroker"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.LocalRedpandaBroker")),(0,r.kt)("p",null,"LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing."),(0,r.kt)("h3",{id:"init"},(0,r.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None")),(0,r.kt)("p",null,"Initialises the LocalRedpandaBroker object"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and 
consumers) can connect"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"tag"),": Tag of Redpanda image to use to start container"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"seastar_core"),": Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"memory"),": The amount of memory to make available to Redpanda"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"mode"),": Mode to use to load configuration properties in container"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"default_log_level"),": Log levels to use for Redpanda")),(0,r.kt)("h3",{id:"get_service_config_string"},(0,r.kt)("inlineCode",{parentName:"h3"},"get_service_config_string")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str")),(0,r.kt)("p",null,"Generates a configuration for a service"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"service"),': "redpanda", defines which service to get config string for')),(0,r.kt)("h3",{id:"start"},(0,r.kt)("inlineCode",{parentName:"h3"},"start")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def start(self: fastkafka.testing.LocalRedpandaBroker) -> str")),(0,r.kt)("p",null,"Starts a local redpanda broker instance synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Redpanda broker bootstrap server address in string format: 
add:port")),(0,r.kt)("h3",{id:"stop"},(0,r.kt)("inlineCode",{parentName:"h3"},"stop")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None")),(0,r.kt)("p",null,"Stops a local redpanda broker instance synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"None")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4f8e8160.b9904ca2.js b/assets/js/4f8e8160.b9904ca2.js new file mode 100644 index 0000000..eea4fe2 --- /dev/null +++ b/assets/js/4f8e8160.b9904ca2.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7683],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>d});var r=a(7294);function n(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,r)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){n(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,r,n=function(e,t){if(null==e)return{};var a,r,n={},o=Object.keys(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||(n[a]=e[a]);return n}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(n[a]=e[a])}return n}var s=r.createContext({}),p=function(e){var t=r.useContext(s),a=t;return e&&(a="function"==typeof 
e?e(t):i(i({},t),e)),a},k=function(e){var t=p(e.components);return r.createElement(s.Provider,{value:t},e.children)},c="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var a=e.components,n=e.mdxType,o=e.originalType,s=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(a),u=n,d=c["".concat(s,".").concat(u)]||c[u]||f[u]||o;return a?r.createElement(d,i(i({ref:t},k),{},{components:a})):r.createElement(d,i({ref:t},k))}));function d(e,t){var a=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var o=a.length,i=new Array(o);i[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:n,i[1]=l;for(var p=2;p<o;p++)i[p]=a[p];return r.createElement.apply(null,i)}return r.createElement.apply(null,a)}u.displayName="MDXCreateElement"},1337:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var r=a(7462),n=(a(7294),a(3905));const o={},i=void 0,l={unversionedId:"api/fastkafka/testing/ApacheKafkaBroker",id:"version-0.6.0/api/fastkafka/testing/ApacheKafkaBroker",title:"ApacheKafkaBroker",description:"fastkafka.testing.ApacheKafkaBroker 
{fastkafka.testing.ApacheKafkaBroker}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/testing/ApacheKafkaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/ApacheKafkaBroker",permalink:"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_encoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/json_encoder"},next:{title:"LocalRedpandaBroker",permalink:"/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker"}},s={},p=[{value:"<code>fastkafka.testing.ApacheKafkaBroker</code>",id:"fastkafka.testing.ApacheKafkaBroker",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>start</code>",id:"start",level:3},{value:"<code>stop</code>",id:"stop",level:3}],k={toc:p},c="wrapper";function f(e){let{components:t,...a}=e;return(0,n.kt)(c,(0,r.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.testing.ApacheKafkaBroker"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.ApacheKafkaBroker")),(0,n.kt)("p",null,"ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing."),(0,n.kt)("h3",{id:"init"},(0,n.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None")),(0,n.kt)("p",null,"Initialises the ApacheKafkaBroker object"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to 
connect"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,n.kt)("h3",{id:"start"},(0,n.kt)("inlineCode",{parentName:"h3"},"start")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def start(self: fastkafka.testing.ApacheKafkaBroker) -> str")),(0,n.kt)("p",null,"Starts a local kafka broker and zookeeper instance synchronously"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"Kafka broker bootstrap server address in string format: add:port")),(0,n.kt)("h3",{id:"stop"},(0,n.kt)("inlineCode",{parentName:"h3"},"stop")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None")),(0,n.kt)("p",null,"Stops a local kafka broker and zookeeper instance synchronously"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/514a13f6.826a6d2f.js b/assets/js/514a13f6.826a6d2f.js new file mode 100644 index 0000000..42ff1bf --- /dev/null +++ b/assets/js/514a13f6.826a6d2f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3800],{3905:(t,e,a)=>{a.d(e,{Zo:()=>s,kt:()=>u});var r=a(7294);function n(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,r)}return a}function k(t){for(var e=1;e<arguments.length;e++){var 
a=null!=arguments[e]?arguments[e]:{};e%2?l(Object(a),!0).forEach((function(e){n(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):l(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function i(t,e){if(null==t)return{};var a,r,n=function(t,e){if(null==t)return{};var a,r,n={},l=Object.keys(t);for(r=0;r<l.length;r++)a=l[r],e.indexOf(a)>=0||(n[a]=t[a]);return n}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(r=0;r<l.length;r++)a=l[r],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(n[a]=t[a])}return n}var o=r.createContext({}),p=function(t){var e=r.useContext(o),a=e;return t&&(a="function"==typeof t?t(e):k(k({},e),t)),a},s=function(t){var e=p(t.components);return r.createElement(o.Provider,{value:e},t.children)},c="mdxType",f={inlineCode:"code",wrapper:function(t){var e=t.children;return r.createElement(r.Fragment,{},e)}},d=r.forwardRef((function(t,e){var a=t.components,n=t.mdxType,l=t.originalType,o=t.parentName,s=i(t,["components","mdxType","originalType","parentName"]),c=p(a),d=n,u=c["".concat(o,".").concat(d)]||c[d]||f[d]||l;return a?r.createElement(u,k(k({ref:e},s),{},{components:a})):r.createElement(u,k({ref:e},s))}));function u(t,e){var a=arguments,n=e&&e.mdxType;if("string"==typeof t||n){var l=a.length,k=new Array(l);k[0]=d;var i={};for(var o in e)hasOwnProperty.call(e,o)&&(i[o]=e[o]);i.originalType=t,i[c]="string"==typeof t?t:n,k[1]=i;for(var p=2;p<l;p++)k[p]=a[p];return r.createElement.apply(null,k)}return r.createElement.apply(null,a)}d.displayName="MDXCreateElement"},9685:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>o,contentTitle:()=>k,default:()=>f,frontMatter:()=>l,metadata:()=>i,toc:()=>p});var r=a(7462),n=(a(7294),a(3905));const l={},k=void 
0,i={unversionedId:"api/fastkafka/testing/ApacheKafkaBroker",id:"api/fastkafka/testing/ApacheKafkaBroker",title:"ApacheKafkaBroker",description:"fastkafka.testing.ApacheKafkaBroker {fastkafka.testing.ApacheKafkaBroker}",source:"@site/docs/api/fastkafka/testing/ApacheKafkaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/ApacheKafkaBroker",permalink:"/docs/next/api/fastkafka/testing/ApacheKafkaBroker",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"SequentialExecutor",permalink:"/docs/next/api/fastkafka/executors/SequentialExecutor"},next:{title:"LocalRedpandaBroker",permalink:"/docs/next/api/fastkafka/testing/LocalRedpandaBroker"}},o={},p=[{value:"fastkafka.testing.ApacheKafkaBroker",id:"fastkafka.testing.ApacheKafkaBroker",level:2},{value:"<strong>init</strong>",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.init",level:3},{value:"get_service_config_string",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.get_service_config_string",level:3},{value:"is_started",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.is_started",level:3},{value:"start",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.start",level:3},{value:"stop",id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.stop",level:3}],s={toc:p},c="wrapper";function f(t){let{components:e,...a}=t;return(0,n.kt)(c,(0,r.Z)({},s,a,{components:e,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.testing.ApacheKafkaBroker"},"fastkafka.testing.ApacheKafkaBroker"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/apache_kafka_broker.py#L168-L305",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("p",null,"ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic 
clashing."),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.init"},(0,n.kt)("strong",{parentName:"h3"},"init")),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/apache_kafka_broker.py#L173-L209",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self,\n topics=[],\n retries=3,\n apply_nest_asyncio=False,\n zookeeper_port=2181,\n listener_port=9092,\n)\n")),(0,n.kt)("p",null,"Initialises the ApacheKafkaBroker object"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Name"),(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"),(0,n.kt)("th",{parentName:"tr",align:null},"Default"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"topics")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"Iterable[str]")),(0,n.kt)("td",{parentName:"tr",align:null},"List of topics to create after sucessfull Kafka broker startup"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"[]"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"retries")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"int")),(0,n.kt)("td",{parentName:"tr",align:null},"Number of retries to create kafka and zookeeper services using 
random"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"3"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"apply_nest_asyncio")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"bool")),(0,n.kt)("td",{parentName:"tr",align:null},"set to True if running in notebook"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"False"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"zookeeper_port")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"int")),(0,n.kt)("td",{parentName:"tr",align:null},"Port for clients (Kafka brokes) to connect"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"2181"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"listener_port")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"int")),(0,n.kt)("td",{parentName:"tr",align:null},"Port on which the clients (producers and consumers) can connect"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"9092"))))),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.get_service_config_string"},"get_service_config_string"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/apache_kafka_broker.py#L459-L475",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"get_service_config_string(\n self, service, data_dir\n)\n")),(0,n.kt)("p",null,"Gets the configuration string for a 
service."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Name"),(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"),(0,n.kt)("th",{parentName:"tr",align:null},"Default"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"service")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"str")),(0,n.kt)("td",{parentName:"tr",align:null},'Name of the service ("kafka" or "zookeeper").'),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"data_dir")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"Path")),(0,n.kt)("td",{parentName:"tr",align:null},"Path to the directory where the service will save data."),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))))),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"str")),(0,n.kt)("td",{parentName:"tr",align:null},"The service configuration 
string.")))),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.is_started"},"is_started"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/apache_kafka_broker.py#L212-L222",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"@property\nis_started(\n self\n)\n")),(0,n.kt)("p",null,"Property indicating whether the ApacheKafkaBroker object is started."),(0,n.kt)("p",null,"The is_started property indicates if the ApacheKafkaBroker object is currently\nin a started state. This implies that Zookeeper and Kafka broker processes have\nsucesfully started and are ready for handling events."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"bool")),(0,n.kt)("td",{parentName:"tr",align:null},"True if the object is started, False otherwise.")))),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.start"},"start"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/apache_kafka_broker.py#L624-L664",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"start(\n self\n)\n")),(0,n.kt)("p",null,"Starts a local Kafka broker and ZooKeeper instance 
synchronously."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"str")),(0,n.kt)("td",{parentName:"tr",align:null},"The Kafka broker bootstrap server address in string format: host:port.")))),(0,n.kt)("h3",{id:"fastkafka._testing.apache_kafka_broker.ApacheKafkaBroker.stop"},"stop"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/apache_kafka_broker.py#L668-L680",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"stop(\n self\n)\n")),(0,n.kt)("p",null,"Stops a local kafka broker and zookeeper instance synchronously"))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/516ebbd1.b21303ee.js b/assets/js/516ebbd1.b21303ee.js new file mode 100644 index 0000000..fc22e1c --- /dev/null +++ b/assets/js/516ebbd1.b21303ee.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4095],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?a(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function l(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),u=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(s.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),p=u(n),k=o,d=p["".concat(s,".").concat(k)]||p[k]||f[k]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:o,i[1]=l;for(var u=2;u<a;u++)i[u]=n[u];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}k.displayName="MDXCreateElement"},5048:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>l,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const a={},i="Intro",l={unversionedId:"guides/Guide_01_Intro",id:"guides/Guide_01_Intro",title:"Intro",description:"This tutorial will show you how to use FastKafkaAPI, step 
by",source:"@site/docs/guides/Guide_01_Intro.md",sourceDirName:"guides",slug:"/guides/Guide_01_Intro",permalink:"/docs/next/guides/Guide_01_Intro",draft:!1,tags:[],version:"current",frontMatter:{}},s={},u=[{value:"Installing FastKafkaAPI",id:"installing-fastkafkaapi",level:2},{value:"Preparing a Kafka broker",id:"preparing-a-kafka-broker",level:2},{value:"Running the code",id:"running-the-code",level:2}],c={toc:u},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"intro"},"Intro"),(0,o.kt)("p",null,"This tutorial will show you how to use ",(0,o.kt)("b",null,"FastKafkaAPI"),", step by\nstep."),(0,o.kt)("p",null,"The goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel."),(0,o.kt)("p",null,"In this Intro tutorial we\u2019ll go trough the basic requirements to run the\ndemos presented in future steps."),(0,o.kt)("h2",{id:"installing-fastkafkaapi"},"Installing FastKafkaAPI"),(0,o.kt)("p",null,"First step is to install FastKafkaAPI"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ pip install fastkafka\n")),(0,o.kt)("h2",{id:"preparing-a-kafka-broker"},"Preparing a Kafka broker"),(0,o.kt)("p",null,"Next step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication."),(0,o.kt)("p",null,'!!! info "Hey, your first info!"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n")),(0,o.kt)("p",null,"To go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times)."),(0,o.kt)("p",null,'!!! warning "Listen! 
This is important."'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'To be able to setup this configuration you need to have Docker and docker-compose installed\n\nSee here for more info on <a href = \\"https://docs.docker.com/\\" target=\\"_blank\\">Docker</a> and <a href = \\"https://docs.docker.com/compose/install/\\" target=\\"_blank\\">docker compose</a>\n')),(0,o.kt)("p",null,"To setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g.\xa0fastkafka_demo). Inside the new\nfolder create a new YAML file named ",(0,o.kt)("b",null,"kafka_demo.yml")," and copy the\nfollowing configuration into it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'version: "3"\nservices:\n zookeeper:\n image: wurstmeister/zookeeper\n hostname: zookeeper\n container_name: zookeeper\n networks:\n - fastkafka-network\n ports:\n - "2181:2181"\n - "22:22"\n - "2888:2888"\n - "3888:3888"\n kafka:\n image: wurstmeister/kafka\n container_name: kafka\n ports:\n - "9093:9093"\n environment:\n HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d\' \' -f 2"\n KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"\n KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n KAFKA_CREATE_TOPICS: "hello:1:1"\n volumes:\n - /var/run/docker.sock:/var/run/docker.sock\n depends_on:\n - zookeeper\n healthcheck:\n test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]\n interval: 5s\n timeout: 10s\n retries: 5\n networks:\n - fastkafka-network\nnetworks:\n fastkafka-network:\n name: "fastkafka-network"\n')),(0,o.kt)("p",null,"This configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a \u2018hello\u2019 topic (quite enough for a\nstart). 
To start the configuration, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ docker-compose -f kafka_demo.yaml up -d --wait\n")),(0,o.kt)("p",null,"This will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! \ud83c\udf8a"),(0,o.kt)("h2",{id:"running-the-code"},"Running the code"),(0,o.kt)("p",null,"After installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the \u2018First Steps\u2019 part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem."),(0,o.kt)("p",null,"You are highly encouraged to follow along the tutorials not just by\nreading trough them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/5300e879.8a8dc6e5.js b/assets/js/5300e879.8a8dc6e5.js new file mode 100644 index 0000000..3420000 --- /dev/null +++ b/assets/js/5300e879.8a8dc6e5.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7600],{3905:(e,a,o)=>{o.d(a,{Zo:()=>c,kt:()=>d});var t=o(7294);function n(e,a,o){return a in e?Object.defineProperty(e,a,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[a]=o,e}function s(e,a){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),o.push.apply(o,t)}return o}function r(e){for(var a=1;a<arguments.length;a++){var 
o=null!=arguments[a]?arguments[a]:{};a%2?s(Object(o),!0).forEach((function(a){n(e,a,o[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(o)):s(Object(o)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(o,a))}))}return e}function l(e,a){if(null==e)return{};var o,t,n=function(e,a){if(null==e)return{};var o,t,n={},s=Object.keys(e);for(t=0;t<s.length;t++)o=s[t],a.indexOf(o)>=0||(n[o]=e[o]);return n}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(t=0;t<s.length;t++)o=s[t],a.indexOf(o)>=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(n[o]=e[o])}return n}var i=t.createContext({}),p=function(e){var a=t.useContext(i),o=a;return e&&(o="function"==typeof e?e(a):r(r({},a),e)),o},c=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},k=t.forwardRef((function(e,a){var o=e.components,n=e.mdxType,s=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),m=p(o),k=n,d=m["".concat(i,".").concat(k)]||m[k]||u[k]||s;return o?t.createElement(d,r(r({ref:a},c),{},{components:o})):t.createElement(d,r({ref:a},c))}));function d(e,a){var o=arguments,n=a&&a.mdxType;if("string"==typeof e||n){var s=o.length,r=new Array(s);r[0]=k;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[m]="string"==typeof e?e:n,r[1]=l;for(var p=2;p<s;p++)r[p]=o[p];return t.createElement.apply(null,r)}return t.createElement.apply(null,o)}k.displayName="MDXCreateElement"},1171:(e,a,o)=>{o.r(a),o.d(a,{assets:()=>i,contentTitle:()=>r,default:()=>u,frontMatter:()=>s,metadata:()=>l,toc:()=>p});var t=o(7462),n=(o(7294),o(3905));const s={},r="@consumes basics",l={unversionedId:"guides/Guide_11_Consumes_Basics",id:"version-0.8.0/guides/Guide_11_Consumes_Basics",title:"@consumes basics",description:"You 
can use @consumes decorator to consume messages from Kafka topics.",source:"@site/versioned_docs/version-0.8.0/guides/Guide_11_Consumes_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_11_Consumes_Basics",permalink:"/docs/guides/Guide_11_Consumes_Basics",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/"},next:{title:"Batch consuming",permalink:"/docs/guides/Guide_12_Batch_Consuming"}},i={},p=[{value:"Import <code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a consumer function and decorate it with <code>@consumes</code>",id:"create-a-consumer-function-and-decorate-it-with-consumes",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Send the message to kafka topic",id:"send-the-message-to-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2},{value:"Message metadata",id:"message-metadata",level:2},{value:"Create a consumer function with metadata",id:"create-a-consumer-function-with-metadata",level:3},{value:"Dealing with high latency consuming functions",id:"dealing-with-high-latency-consuming-functions",level:2}],c={toc:p},m="wrapper";function u(e){let{components:a,...o}=e;return(0,n.kt)(m,(0,t.Z)({},c,o,{components:a,mdxType:"MDXLayout"}),(0,n.kt)("h1",{id:"consumes-basics"},"@consumes basics"),(0,n.kt)("p",null,"You can use ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator to consume messages from Kafka topics."),(0,n.kt)("p",null,"In this guide we will create a simple FastKafka app that will consume\n",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages from hello_world topic."),(0,n.kt)("h2",{id:"import-fastkafka"},"Import 
",(0,n.kt)("a",{parentName:"h2",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,n.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,n.kt)("p",null,"To use the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator, first we need to import the base\nFastKafka app to create our application."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,n.kt)("p",null,"In this demo we will log the messages to the output so that we can\ninspect and verify that our app is consuming properly. For that we need\nto import the logger."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n")),(0,n.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,n.kt)("p",null,"Next, you need to define the structure of the messages you want to\nconsume from the topic using ",(0,n.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". 
For\nthe guide we\u2019ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded."),(0,n.kt)("p",null,"Let\u2019s import ",(0,n.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,n.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,n.kt)("inlineCode",{parentName:"p"},"msg")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'class HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,n.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,n.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,n.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,n.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap server"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'kafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,n.kt)("h2",{id:"create-a-consumer-function-and-decorate-it-with-consumes"},"Create a consumer function and decorate it with ",(0,n.kt)("inlineCode",{parentName:"h2"},"@consumes")),(0,n.kt)("p",null,"Let\u2019s create a consumer function that will consume ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nfrom ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and log 
them."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"The function decorated with the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will be called\nwhen a message is produced to Kafka."),(0,n.kt)("p",null,"The message will then be injected into the typed ",(0,n.kt)("em",{parentName:"p"},"msg")," argument of the\nfunction and its type will be used to parse the message."),(0,n.kt)("p",null,"In this example case, when the message is sent into a ",(0,n.kt)("em",{parentName:"p"},"hello_world"),"\ntopic, it will be parsed into a HelloWorld class and ",(0,n.kt)("inlineCode",{parentName:"p"},"on_hello_world"),"\nfunction will be called with the parsed class as ",(0,n.kt)("em",{parentName:"p"},"msg")," argument value."),(0,n.kt)("h2",{id:"final-app"},"Final app"),(0,n.kt)("p",null,"Your app code should look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("h2",{id:"run-the-app"},"Run the app"),(0,n.kt)("p",null,"Now we can run the app. 
Copy the code above in consumer_example.py and\nrun it by running"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,n.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[14442]: 23-06-15 07:16:00.585 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\nStarting process cleanup, this may take a few seconds...\n23-06-15 07:16:04.626 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 14442...\n[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-15 07:16:05.853 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 14442 terminated.\n")),(0,n.kt)("h2",{id:"send-the-message-to-kafka-topic"},"Send the message to kafka topic"),(0,n.kt)("p",null,"Lets send a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},'echo { \\"msg\\": \\"Hello world\\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[15588]: 23-06-15 07:16:15.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[15588]: 23-06-15 07:16:15.294 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[15588]: 23-06-15 07:16:15.295 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[15588]: 23-06-15 07:16:15.295 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[15588]: 23-06-15 07:16:15.302 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \n[15588]: 23-06-15 07:16:25.867 [INFO] consumer_example: Got msg: msg='Hello world'\nStarting process cleanup, this may take a few seconds...\n23-06-15 07:16:34.168 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 15588...\n[15588]: 23-06-15 07:16:35.358 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[15588]: 23-06-15 07:16:35.359 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-15 07:16:35.475 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 15588 terminated.\n")),(0,n.kt)("p",null,"You should see the \u201cGot msg: msg='Hello world'\" being logged by your\nconsumer."),(0,n.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,n.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are\nreceiving the message from, this is because the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default \u201con","_",'" prefix from it\nand use the rest as the topic name. 
In this example case, the topic is\n',(0,n.kt)("em",{parentName:"p"},"hello_world"),"."),(0,n.kt)("p",null,"You can choose your custom prefix by defining the ",(0,n.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nconsumes decorator, like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(prefix="read_from_")\nasync def read_from_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,n.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in consumes decorator, like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(topic="my_special_topic")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("h2",{id:"message-data"},"Message data"),(0,n.kt)("p",null,"The message received from kafka is translated from binary JSON\nrepresentation int the class defined by typing of ",(0,n.kt)("em",{parentName:"p"},"msg")," parameter in the\nfunction decorated by the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator."),(0,n.kt)("p",null,"In this example case, the message will be parsed into a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld"),"\nclass."),(0,n.kt)("h2",{id:"message-metadata"},"Message metadata"),(0,n.kt)("p",null,"If you need any of Kafka message metadata such as timestamp, partition\nor headers you can access the metadata by adding a EventMetadata typed\nargument to your consumes function and the metadata from the incoming\nmessage will be automatically injected when calling the consumes\nfunction."),(0,n.kt)("p",null,"Let\u2019s demonstrate that."),(0,n.kt)("h3",{id:"create-a-consumer-function-with-metadata"},"Create a consumer function with metadata"),(0,n.kt)("p",null,"The only difference from the original basic consume function is that we\nare now passing the 
",(0,n.kt)("inlineCode",{parentName:"p"},"meta: EventMetadata")," argument to the function. The\n",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will register that and, when a message is\nconsumed, it will also pass the metadata to your function. Now you can\nuse the metadata in your consume function. Lets log it to see what it\ncontains."),(0,n.kt)("p",null,"First, we need to import the EventMetadata"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import EventMetadata\n")),(0,n.kt)("p",null,"Now we can add the ",(0,n.kt)("inlineCode",{parentName:"p"},"meta")," argument to our consuming function."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld, meta: EventMetadata):\n logger.info(f"Got metadata: {meta}")\n')),(0,n.kt)("p",null,"Your final app should look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka import EventMetadata\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld, meta: EventMetadata):\n logger.info(f"Got metadata: {meta}")\n')),(0,n.kt)("p",null,"Now lets run the app and send a message to the broker to see the logged\nmessage metadata."),(0,n.kt)("p",null,"You should see a similar log as the one below and the metadata being\nlogged in your app."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[20050]: 
23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[20050]: 23-06-15 07:18:55.682 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\n[20050]: 23-06-15 07:19:06.337 [INFO] consumer_example: Got metadata: EventMetadata(topic='hello_world', partition=0, offset=0, timestamp=1686813546255, timestamp_type=0, key=None, value=b'{ \"msg\": \"Hello world\" }', checksum=None, serialized_key_size=-1, serialized_value_size=24, headers=())\nStarting process cleanup, this may take a few seconds...\n23-06-15 07:19:14.547 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 20050...\n[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-15 07:19:15.742 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 20050 terminated.\n")),(0,n.kt)("p",null,"As you can see in the log, from the metadata you now have the\ninformation about the partition, offset, timestamp, key and headers.\n\ud83c\udf89"),(0,n.kt)("h2",{id:"dealing-with-high-latency-consuming-functions"},"Dealing with high latency consuming functions"),(0,n.kt)("p",null,"If your functions have high latency due to, for example, lengthy\ndatabase calls you will notice a big decrease in performance. This is\ndue to the issue of how the consumes decorator executes your consume\nfunctions when consuming events. By default, the consume function will\nrun the consuming funtions for one topic sequentially, this is the most\nstraightforward approach and results with the least amount of overhead."),(0,n.kt)("p",null,"But, to handle those high latency tasks and run them in parallel,\nFastKafka has a\n",(0,n.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/executors/DynamicTaskExecutor#fastkafka.executors.DynamicTaskExecutor"},(0,n.kt)("inlineCode",{parentName:"a"},"DynamicTaskExecutor")),"\nprepared for your consumers. 
This executor comes with additional\noverhead, so use it only when you need to handle high latency functions."),(0,n.kt)("p",null,"Lets demonstrate how to use it."),(0,n.kt)("p",null,"To your consumes decorator, add an ",(0,n.kt)("inlineCode",{parentName:"p"},"executor")," option and set it to\n",(0,n.kt)("inlineCode",{parentName:"p"},'"DynamicTaskExecutor"'),", this will enable the consumer to handle high\nlatency functions effectively."),(0,n.kt)("p",null,"Your consuming function should now look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(executor="DynamicTaskExecutor")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"And the complete app should now look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(executor="DynamicTaskExecutor")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"You can now run your app using the CLI commands described in this guide."),(0,n.kt)("p",null,"Lets send a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. 
In your\nterminal, run:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},'echo { \\"msg\\": \\"Hello world\\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,n.kt)("p",null,"You should see a similar log as the one below."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[21539]: 23-06-15 07:19:25.154 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\n[21539]: 23-06-15 07:19:35.512 [INFO] consumer_example: Got msg: msg='Hello world'\nStarting process cleanup, this may take a few seconds...\n23-06-15 07:19:44.023 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 21539...\n[21539]: 23-06-15 07:19:45.202 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[21539]: 23-06-15 07:19:45.203 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-15 07:19:45.313 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 21539 terminated.\n")),(0,n.kt)("p",null,"Inside the log, you should see the \u201cGot msg: msg='Hello world'\" being\nlogged by your consumer."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/5347168a.4d2a41c3.js b/assets/js/5347168a.4d2a41c3.js new file mode 100644 index 0000000..8fcb031 --- /dev/null +++ b/assets/js/5347168a.4d2a41c3.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[424],{3905:(t,e,a)=>{a.d(e,{Zo:()=>k,kt:()=>d});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function o(t){for(var e=1;e<arguments.length;e++){var a=null!=arguments[e]?arguments[e]:{};e%2?l(Object(a),!0).forEach((function(e){r(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):l(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function i(t,e){if(null==t)return{};var a,n,r=function(t,e){if(null==t)return{};var 
a,n,r={},l=Object.keys(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var s=n.createContext({}),c=function(t){var e=n.useContext(s),a=e;return t&&(a="function"==typeof t?t(e):o(o({},e),t)),a},k=function(t){var e=c(t.components);return n.createElement(s.Provider,{value:e},t.children)},u="mdxType",p={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},m=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,s=t.parentName,k=i(t,["components","mdxType","originalType","parentName"]),u=c(a),m=r,d=u["".concat(s,".").concat(m)]||u[m]||p[m]||l;return a?n.createElement(d,o(o({ref:e},k),{},{components:a})):n.createElement(d,o({ref:e},k))}));function d(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,o=new Array(l);o[0]=m;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=t,i[u]="string"==typeof t?t:r,o[1]=i;for(var c=2;c<l;c++)o[c]=a[c];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},4540:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>s,contentTitle:()=>o,default:()=>p,frontMatter:()=>l,metadata:()=>i,toc:()=>c});var n=a(7462),r=(a(7294),a(3905));const l={},o=void 0,i={unversionedId:"api/fastkafka/executors/DynamicTaskExecutor",id:"api/fastkafka/executors/DynamicTaskExecutor",title:"DynamicTaskExecutor",description:"fastkafka.executors.DynamicTaskExecutor 
{fastkafka.executors.DynamicTaskExecutor}",source:"@site/docs/api/fastkafka/executors/DynamicTaskExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/DynamicTaskExecutor",permalink:"/docs/next/api/fastkafka/executors/DynamicTaskExecutor",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_encoder",permalink:"/docs/next/api/fastkafka/encoder/json_encoder"},next:{title:"SequentialExecutor",permalink:"/docs/next/api/fastkafka/executors/SequentialExecutor"}},s={},c=[{value:"fastkafka.executors.DynamicTaskExecutor",id:"fastkafka.executors.DynamicTaskExecutor",level:2},{value:"<strong>init</strong>",id:"fastkafka._components.task_streaming.DynamicTaskExecutor.init",level:3},{value:"run",id:"fastkafka._components.task_streaming.DynamicTaskExecutor.run",level:3}],k={toc:c},u="wrapper";function p(t){let{components:e,...a}=t;return(0,r.kt)(u,(0,n.Z)({},k,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.executors.DynamicTaskExecutor"},"fastkafka.executors.DynamicTaskExecutor"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/task_streaming.py#L207-L272",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"A class that implements a dynamic task executor for processing consumer records."),(0,r.kt)("p",null,"The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\nfor running a tasks in parallel using asyncio.Task."),(0,r.kt)("h3",{id:"fastkafka._components.task_streaming.DynamicTaskExecutor.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/task_streaming.py#L214-L237",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self, throw_exceptions=False, max_buffer_size=100000, 
size=100000\n)\n")),(0,r.kt)("p",null,"Create an instance of DynamicTaskExecutor"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"throw_exceptions")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"Flag indicating whether exceptions should be thrown ot logged.Defaults to False."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_buffer_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum buffer size for the memory object stream.Defaults to 100_000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Size of the task pool. 
Defaults to 100_000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100000"))))),(0,r.kt)("h3",{id:"fastkafka._components.task_streaming.DynamicTaskExecutor.run"},"run"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/task_streaming.py#L239-L272",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"run(\n self, is_shutting_down_f, generator, processor\n)\n")),(0,r.kt)("p",null,"Runs the dynamic task executor."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"is_shutting_down_f")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[], bool]")),(0,r.kt)("td",{parentName:"tr",align:null},"Function to check if the executor is shutting down."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"generator")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Generator function for retrieving consumer 
records."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"processor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Processor function for processing consumer records."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))))}p.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/5527e5b7.a5884eda.js b/assets/js/5527e5b7.a5884eda.js new file mode 100644 index 0000000..cef4af1 --- /dev/null +++ b/assets/js/5527e5b7.a5884eda.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3114],{3905:(t,e,a)=>{a.d(e,{Zo:()=>k,kt:()=>c});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function i(t){for(var e=1;e<arguments.length;e++){var a=null!=arguments[e]?arguments[e]:{};e%2?l(Object(a),!0).forEach((function(e){r(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):l(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function p(t,e){if(null==t)return{};var a,n,r=function(t,e){if(null==t)return{};var a,n,r={},l=Object.keys(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var 
l=Object.getOwnPropertySymbols(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var d=n.createContext({}),o=function(t){var e=n.useContext(d),a=e;return t&&(a="function"==typeof t?t(e):i(i({},e),t)),a},k=function(t){var e=o(t.components);return n.createElement(d.Provider,{value:e},t.children)},m="mdxType",u={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},s=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,d=t.parentName,k=p(t,["components","mdxType","originalType","parentName"]),m=o(a),s=r,c=m["".concat(d,".").concat(s)]||m[s]||u[s]||l;return a?n.createElement(c,i(i({ref:e},k),{},{components:a})):n.createElement(c,i({ref:e},k))}));function c(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,i=new Array(l);i[0]=s;var p={};for(var d in e)hasOwnProperty.call(e,d)&&(p[d]=e[d]);p.originalType=t,p[m]="string"==typeof t?t:r,i[1]=p;for(var o=2;o<l;o++)i[o]=a[o];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}s.displayName="MDXCreateElement"},8979:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>d,contentTitle:()=>i,default:()=>u,frontMatter:()=>l,metadata:()=>p,toc:()=>o});var n=a(7462),r=(a(7294),a(3905));const l={},i=void 0,p={unversionedId:"api/fastkafka/EventMetadata",id:"version-0.8.0/api/fastkafka/EventMetadata",title:"EventMetadata",description:"fastkafka.EventMetadata {fastkafka.EventMetadata}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/EventMetadata.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/EventMetadata",permalink:"/docs/api/fastkafka/EventMetadata",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Benchmarking FastKafka 
app",permalink:"/docs/guides/Guide_06_Benchmarking_FastKafka"},next:{title:"FastKafka",permalink:"/docs/api/fastkafka/"}},d={},o=[{value:"fastkafka.EventMetadata",id:"fastkafka.EventMetadata",level:2},{value:"create_event_metadata",id:"fastkafka._components.aiokafka_consumer_loop.EventMetadata.create_event_metadata",level:3}],k={toc:o},m="wrapper";function u(t){let{components:e,...a}=t;return(0,r.kt)(m,(0,n.Z)({},k,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.EventMetadata"},"fastkafka.EventMetadata"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/aiokafka_consumer_loop.py#L27-L77",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"A class for encapsulating Kafka record metadata."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The topic this record is received from"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partition")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The partition from which this record is 
received"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"offset")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The position of this record in the corresponding Kafka partition"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"timestamp")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The timestamp of this record"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"timestamp_type")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The timestamp type of this record"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[bytes]")),(0,r.kt)("td",{parentName:"tr",align:null},"The key (or ",(0,r.kt)("inlineCode",{parentName:"td"},"None")," if no key is 
specified)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[bytes]")),(0,r.kt)("td",{parentName:"tr",align:null},"The value"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"serialized_key_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The size of the serialized, uncompressed key in bytes"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"serialized_value_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The size of the serialized, uncompressed value in bytes"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"headers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Sequence[Tuple[str, bytes]]")),(0,r.kt)("td",{parentName:"tr",align:null},"The 
headers"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("h3",{id:"fastkafka._components.aiokafka_consumer_loop.EventMetadata.create_event_metadata"},"create_event_metadata"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/aiokafka_consumer_loop.py#L56-L77",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"@staticmethod\ncreate_event_metadata(\n record\n)\n")),(0,r.kt)("p",null,"Creates an instance of EventMetadata from a ConsumerRecord."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"record")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ConsumerRecord")),(0,r.kt)("td",{parentName:"tr",align:null},"The Kafka ConsumerRecord."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"EventMetadata")),(0,r.kt)("td",{parentName:"tr",align:null},"The created EventMetadata instance.")))))}u.isMDXComponent=!0}}]); \ 
No newline at end of file diff --git a/assets/js/5534c352.d3e76e0e.js b/assets/js/5534c352.d3e76e0e.js new file mode 100644 index 0000000..e5767fb --- /dev/null +++ b/assets/js/5534c352.d3e76e0e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7229],{3905:(t,e,a)=>{a.d(e,{Zo:()=>u,kt:()=>c});var i=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function n(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(t);e&&(i=i.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,i)}return a}function s(t){for(var e=1;e<arguments.length;e++){var a=null!=arguments[e]?arguments[e]:{};e%2?n(Object(a),!0).forEach((function(e){r(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):n(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function l(t,e){if(null==t)return{};var a,i,r=function(t,e){if(null==t)return{};var a,i,r={},n=Object.keys(t);for(i=0;i<n.length;i++)a=n[i],e.indexOf(a)>=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);for(i=0;i<n.length;i++)a=n[i],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var p=i.createContext({}),k=function(t){var e=i.useContext(p),a=e;return t&&(a="function"==typeof t?t(e):s(s({},e),t)),a},u=function(t){var e=k(t.components);return i.createElement(p.Provider,{value:e},t.children)},o="mdxType",m={inlineCode:"code",wrapper:function(t){var e=t.children;return i.createElement(i.Fragment,{},e)}},h=i.forwardRef((function(t,e){var a=t.components,r=t.mdxType,n=t.originalType,p=t.parentName,u=l(t,["components","mdxType","originalType","parentName"]),o=k(a),h=r,c=o["".concat(p,".").concat(h)]||o[h]||m[h]||n;return 
a?i.createElement(c,s(s({ref:e},u),{},{components:a})):i.createElement(c,s({ref:e},u))}));function c(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var n=a.length,s=new Array(n);s[0]=h;var l={};for(var p in e)hasOwnProperty.call(e,p)&&(l[p]=e[p]);l.originalType=t,l[o]="string"==typeof t?t:r,s[1]=l;for(var k=2;k<n;k++)s[k]=a[k];return i.createElement.apply(null,s)}return i.createElement.apply(null,a)}h.displayName="MDXCreateElement"},885:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>p,contentTitle:()=>s,default:()=>m,frontMatter:()=>n,metadata:()=>l,toc:()=>k});var i=a(7462),r=(a(7294),a(3905));const n={},s="Release notes",l={unversionedId:"CHANGELOG",id:"version-0.6.0/CHANGELOG",title:"Release notes",description:"0.6.0",source:"@site/versioned_docs/version-0.6.0/CHANGELOG.md",sourceDirName:".",slug:"/CHANGELOG",permalink:"/docs/0.6.0/CHANGELOG",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Contributing to fastkafka",permalink:"/docs/0.6.0/CONTRIBUTING"}},p={},k=[{value:"0.6.0",id:"060",level:2},{value:"New Features",id:"new-features",level:3},{value:"Bugs Squashed",id:"bugs-squashed",level:3},{value:"0.5.0",id:"050",level:2},{value:"New Features",id:"new-features-1",level:3},{value:"Bugs Squashed",id:"bugs-squashed-1",level:3},{value:"0.4.0",id:"040",level:2},{value:"New Features",id:"new-features-2",level:3},{value:"0.3.1",id:"031",level:2},{value:"0.3.0",id:"030",level:2},{value:"New Features",id:"new-features-3",level:3},{value:"Bugs Squashed",id:"bugs-squashed-2",level:3},{value:"0.2.3",id:"023",level:2},{value:"0.2.2",id:"022",level:2},{value:"New Features",id:"new-features-4",level:3},{value:"Bugs Squashed",id:"bugs-squashed-3",level:3},{value:"0.2.0",id:"020",level:2},{value:"New Features",id:"new-features-5",level:3},{value:"Bugs Squashed",id:"bugs-squashed-4",level:3},{value:"0.1.3",id:"013",level:2},{value:"0.1.2",id:"012",level:2},{value:"New Features",id:"new-features-6",level:3},{value:"Bugs 
Squashed",id:"bugs-squashed-5",level:3},{value:"0.1.1",id:"011",level:2},{value:"Bugs Squashed",id:"bugs-squashed-6",level:3},{value:"0.1.0",id:"010",level:2}],u={toc:k},o="wrapper";function m(t){let{components:e,...a}=t;return(0,r.kt)(o,(0,i.Z)({},u,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"release-notes"},"Release notes"),(0,r.kt)("h2",{id:"060"},"0.6.0"),(0,r.kt)("h3",{id:"new-features"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Timestamps added to CLI commands (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/283"},"#283"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Added option to process messages concurrently (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/278"},"#278"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"A new ",(0,r.kt)("inlineCode",{parentName:"li"},"executor")," option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add consumes and produces functions to app (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/274"},"#274"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add batching for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/273"},"#273"),"), thanks to 
",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(batch): batch support is a real need! and i see it on the issue list.... so hope we do not need to wait too long"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken links in guides (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/272"},"#272"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate the docusaurus sidebar dynamically by parsing summary.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/270"},"#270"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Metadata passed to consumer (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/269"},"#269"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(key): read the key value somehow..Maybe I missed something in the docs\nrequirement(header): read header values, Reason: I use CDC | Debezium and in the current system the header values are important to differentiate between the CRUD 
operations."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Contribution with instructions how to build and test added (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/255"},"#255"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Export encoders, decoders from fastkafka.encoder (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/246"},"#246"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/239"},"#239"),")")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"UI Improvement: Post screenshots with links to the actual messages in testimonials section (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/228"},"#228"),")")),(0,r.kt)("h3",{id:"bugs-squashed"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Batch testing fix (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/280"},"#280"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Tester breaks when using Batching or KafkaEvent producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/279"},"#279"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Consumer loop callbacks are not executing in parallel (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/276"},"#276"),")"))),(0,r.kt)("h2",{id:"050"},"0.5.0"),(0,r.kt)("h3",{id:"new-features-1"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Significant speedup of Kafka producer (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/236"},"#236"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Added support for AVRO encoding/decoding (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/231"},"#231"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("h3",{id:"bugs-squashed-1"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed sidebar to include guides in docusaurus documentation 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/238"},"#238"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed link to symbols in docusaurus docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/227"},"#227"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed bootstrap servers from constructor (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/220"},"#220"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")))),(0,r.kt)("h2",{id:"040"},"0.4.0"),(0,r.kt)("h3",{id:"new-features-2"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Integrate fastkafka chat (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/208"},"#208"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add benchmarking (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/206"},"#206"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Enable fast testing without running kafka locally (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/198"},"#198"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate docs using Docusaurus (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/194"},"#194"),"), thanks to 
",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add test cases for LocalRedpandaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/189"},"#189"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Reimplement patch and delegates from fastcore (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/188"},"#188"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Rename existing functions into start and stop and add lifespan handler (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/117"},"#117"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"},"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"))))),(0,r.kt)("h2",{id:"031"},"0.3.1"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"README.md file updated")),(0,r.kt)("h2",{id:"030"},"0.3.0"),(0,r.kt)("h3",{id:"new-features-3"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Guide for fastkafka produces using partition key (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/172"},"#172"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #161"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add support for Redpanda for testing and deployment 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/181"},"#181"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Remove bootstrap_servers from ",(0,r.kt)("strong",{parentName:"p"},"init")," and use the name of broker as an option when running/testing (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/134"},"#134"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add a GH action file to check for broken links in the docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/163"},"#163"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optimize requirements for testing and docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/151"},"#151"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Break requirements into base and optional for testing and dev (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/124"},"#124"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Minimize base requirements needed just for running the service."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add link to example git repo into guide for building docs using actions (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/81"},"#81"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add logging for run_in_background (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/46"},"#46"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement partition Key mechanism for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/16"},"#16"),")"))),(0,r.kt)("h3",{id:"bugs-squashed-2"},"Bugs 
Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement checks for npm installation and version (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/176"},"#176"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #158 by checking if the npx is installed and more verbose error handling"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix the helper.py link in CHANGELOG.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/165"},"#165"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka docs install_deps fails (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/157"},"#157"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Unexpected internal error: ","[Errno 2]"," No such file or directory: 'npx'"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Broken links in docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/141"},"#141"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka run is not showing up in CLI docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/132"},"#132"),")"))),(0,r.kt)("h2",{id:"023"},"0.2.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fixed broken links on PyPi index page")),(0,r.kt)("h2",{id:"022"},"0.2.2"),(0,r.kt)("h3",{id:"new-features-4"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Extract JDK and Kafka installation out of LocalKafkaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/131"},"#131"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"PyYAML version relaxed 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/119"},"#119"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Replace docker based kafka with local (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/68"},"#68"),")"),(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace docker compose with a simple docker run (standard run_jupyter.sh should do)"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace all tests to use LocalKafkaBroker"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","update documentation")))),(0,r.kt)("h3",{id:"bugs-squashed-3"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken link for FastKafka docs in index notebook (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/145"},"#145"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix encoding issues when loading setup.py on windows OS (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/135"},"#135"),")"))),(0,r.kt)("h2",{id:"020"},"0.2.0"),(0,r.kt)("h3",{id:"new-features-5"},"New Features"),(0,r.kt)("ul",{className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul"},"Replace kafka container with LocalKafkaBroker 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/112"},"#112"),")",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Replace kafka container with LocalKafkaBroker in tests"))))),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Remove kafka container from tests environment"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Fix failing tests")),(0,r.kt)("h3",{id:"bugs-squashed-4"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fix random failing in CI (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/109"},"#109"),")")),(0,r.kt)("h2",{id:"013"},"0.1.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"version update in ",(0,r.kt)("strong",{parentName:"li"},"init"),".py")),(0,r.kt)("h2",{id:"012"},"0.1.2"),(0,r.kt)("h3",{id:"new-features-6"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Git workflow action for publishing Kafka docs (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/78"},"#78"),")")),(0,r.kt)("h3",{id:"bugs-squashed-5"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Include missing requirement (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/110"},"#110"),")",(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Typer is imported in this 
",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py"},"file")," but it is not included in ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/settings.ini"},"settings.ini")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add aiohttp which is imported in this ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py"},"file")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbformat which is imported in _components/helpers.py"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbconvert which is imported in _components/helpers.py")))),(0,r.kt)("h2",{id:"011"},"0.1.1"),(0,r.kt)("h3",{id:"bugs-squashed-6"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"JDK install fails on Python 3.8 (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/106"},"#106"),")")),(0,r.kt)("h2",{id:"010"},"0.1.0"),(0,r.kt)("p",null,"Initial release"))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/5584c47d.98379ebd.js b/assets/js/5584c47d.98379ebd.js new file mode 100644 index 0000000..7475194 --- /dev/null +++ b/assets/js/5584c47d.98379ebd.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8119],{3905:(e,r,t)=>{t.d(r,{Zo:()=>s,kt:()=>k});var a=t(7294);function n(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function o(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);r&&(a=a.filter((function(r){return 
Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,a)}return t}function c(e){for(var r=1;r<arguments.length;r++){var t=null!=arguments[r]?arguments[r]:{};r%2?o(Object(t),!0).forEach((function(r){n(e,r,t[r])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):o(Object(t)).forEach((function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r))}))}return e}function d(e,r){if(null==e)return{};var t,a,n=function(e,r){if(null==e)return{};var t,a,n={},o=Object.keys(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||(n[t]=e[t]);return n}(e,r);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var i=a.createContext({}),l=function(e){var r=a.useContext(i),t=r;return e&&(t="function"==typeof e?e(r):c(c({},r),e)),t},s=function(e){var r=l(e.components);return a.createElement(i.Provider,{value:r},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var r=e.children;return a.createElement(a.Fragment,{},r)}},u=a.forwardRef((function(e,r){var t=e.components,n=e.mdxType,o=e.originalType,i=e.parentName,s=d(e,["components","mdxType","originalType","parentName"]),p=l(t),u=n,k=p["".concat(i,".").concat(u)]||p[u]||f[u]||o;return t?a.createElement(k,c(c({ref:r},s),{},{components:t})):a.createElement(k,c({ref:r},s))}));function k(e,r){var t=arguments,n=r&&r.mdxType;if("string"==typeof e||n){var o=t.length,c=new Array(o);c[0]=u;var d={};for(var i in r)hasOwnProperty.call(r,i)&&(d[i]=r[i]);d.originalType=e,d[p]="string"==typeof e?e:n,c[1]=d;for(var l=2;l<o;l++)c[l]=t[l];return a.createElement.apply(null,c)}return a.createElement.apply(null,t)}u.displayName="MDXCreateElement"},1164:(e,r,t)=>{t.r(r),t.d(r,{assets:()=>i,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>d,toc:()=>l});var a=t(7462),n=(t(7294),t(3905));const o={},c=void 
0,d={unversionedId:"api/fastkafka/encoder/avro_decoder",id:"version-0.7.0/api/fastkafka/encoder/avro_decoder",title:"avro_decoder",description:"fastkafka.encoder.avrodecoder {fastkafka.encoder.avrodecoder}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/avro_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_decoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/avro_decoder",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"AvroBase",permalink:"/docs/0.7.0/api/fastkafka/encoder/AvroBase"},next:{title:"avro_encoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/avro_encoder"}},i={},l=[{value:"<code>fastkafka.encoder.avro_decoder</code>",id:"fastkafka.encoder.avro_decoder",level:2},{value:"<code>avro_decoder</code>",id:"avro_decoder",level:3}],s={toc:l},p="wrapper";function f(e){let{components:r,...t}=e;return(0,n.kt)(p,(0,a.Z)({},s,t,{components:r,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.encoder.avro_decoder"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avro_decoder")),(0,n.kt)("h3",{id:"avro_decoder"},(0,n.kt)("inlineCode",{parentName:"h3"},"avro_decoder")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def avro_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any")),(0,n.kt)("p",null,"Decoder to decode avro encoded messages to pydantic model instance"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"raw_msg"),": Avro encoded bytes message received from Kafka topic"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"cls"),": Pydantic class; This pydantic class will be used to construct instance of same class")),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"An instance of given pydantic 
class")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/58b4829f.f9857ac8.js b/assets/js/58b4829f.f9857ac8.js new file mode 100644 index 0000000..69515c8 --- /dev/null +++ b/assets/js/58b4829f.f9857ac8.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[721],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>m});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function i(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?s(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function r(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},s=Object.keys(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),l=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof e?e(a):i(i({},a),e)),t},c=function(e){var a=l(e.components);return n.createElement(p.Provider,{value:a},e.children)},k="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},f=n.forwardRef((function(e,a){var 
t=e.components,o=e.mdxType,s=e.originalType,p=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),k=l(t),f=o,m=k["".concat(p,".").concat(f)]||k[f]||d[f]||s;return t?n.createElement(m,i(i({ref:a},c),{},{components:t})):n.createElement(m,i({ref:a},c))}));function m(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=t.length,i=new Array(s);i[0]=f;var r={};for(var p in a)hasOwnProperty.call(a,p)&&(r[p]=a[p]);r.originalType=e,r[k]="string"==typeof e?e:o,i[1]=r;for(var l=2;l<s;l++)i[l]=t[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,t)}f.displayName="MDXCreateElement"},2693:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>i,default:()=>d,frontMatter:()=>s,metadata:()=>r,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},i="FastKafka tutorial",r={unversionedId:"guides/Guide_00_FastKafka_Demo",id:"version-0.8.0/guides/Guide_00_FastKafka_Demo",title:"FastKafka tutorial",description:"FastKafka is a powerful and easy-to-use",source:"@site/versioned_docs/version-0.8.0/guides/Guide_00_FastKafka_Demo.md",sourceDirName:"guides",slug:"/guides/Guide_00_FastKafka_Demo",permalink:"/docs/guides/Guide_00_FastKafka_Demo",draft:!1,tags:[],version:"0.8.0",frontMatter:{}},p={},l=[{value:"Install",id:"install",level:2},{value:"Running in Colab",id:"running-in-colab",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2}],c={toc:l},k="wrapper";function 
d(e){let{components:a,...t}=e;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka-tutorial"},"FastKafka tutorial"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"try:\n import fastkafka\nexcept:\n ! 
pip install fastkafka\n")),(0,o.kt)("h2",{id:"running-in-colab"},"Running in Colab"),(0,o.kt)("p",null,"You can start this interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open In Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"Here is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic."),(0,o.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,o.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. The following call downloads\nthe dataset and trains the model."),(0,o.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/types/#constrained-types"},(0,o.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,o.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,o.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,o.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,o.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts Kafka broker and zookeeper."),(0,o.kt)("p",null,"Before running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka testing install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop 
shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a Iris classification model and encapulated it into our\nfastkafka application. 
The app will consume the IrisInputData from the\n",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,o.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purpuoses")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent IrisInputData message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to IrisInputData message"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin ",(0,o.kt)("inlineCode",{parentName:"p"},"faskafka run")," CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal 
length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("p",null,"To run the service, you will need a running Kafka broker on localhost as\nspecified in the ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")," parameter above. 
We can start the Kafka\nbroker locally using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/testing/ApacheKafkaBroker#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),".\nNotice that the same happens automatically in the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\nas shown above."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n'127.0.0.1:9092'\n")),(0,o.kt)("p",null,"Then, we start the FastKafka service by running the following command in\nthe folder where the ",(0,o.kt)("inlineCode",{parentName:"p"},"application.py")," file is located:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=2 --kafka-broker localhost 
application:kafka_app\n")),(0,o.kt)("p",null,"In the above command, we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--num-workers")," option to specify how many\nworkers to launch and we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--kafka-broker")," option to specify which\nkafka broker configuration to use from earlier specified ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n^C\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
stopped.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n")),(0,o.kt)("p",null,"You need to interupt running of the cell above by selecting\n",(0,o.kt)("inlineCode",{parentName:"p"},"Runtime->Interupt execution")," on the toolbar above."),(0,o.kt)("p",null,"Finally, we can stop the local Kafka Broker:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"When running in Colab, we need to update Node.js first:"),(0,o.kt)("p",null,"We need to install all dependancies for the generator using the\nfollowing command line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation 
programatically you just need to call the\nfolloving command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n")),(0,o.kt)("p",null,". This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\ndrwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n^C\nInterupting serving of documentation and cleaning up...\n")),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n 
bootstrap_servers="localhost:9092",\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/58f10d9f.49ed8c8f.js b/assets/js/58f10d9f.49ed8c8f.js new file mode 100644 index 0000000..cfc2bef --- /dev/null +++ b/assets/js/58f10d9f.49ed8c8f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2493],{9005:a=>{a.exports=JSON.parse('{"pluginId":"default","version":"0.6.0","label":"0.6.0","banner":"unmaintained","badge":true,"noIndex":false,"className":"docs-version-0.6.0","isLast":false,"docsSidebars":{"tutorialSidebar":[{"type":"link","label":"FastKafka","href":"/docs/0.6.0/","docId":"index"},{"type":"category","label":"Guides","items":[{"type":"category","label":"Writing services","items":[{"type":"link","label":"@consumes 
basics","href":"/docs/0.6.0/guides/Guide_11_Consumes_Basics","docId":"guides/Guide_11_Consumes_Basics"},{"type":"link","label":"@produces basics","href":"/docs/0.6.0/guides/Guide_21_Produces_Basics","docId":"guides/Guide_21_Produces_Basics"},{"type":"link","label":"Defining a partition key","href":"/docs/0.6.0/guides/Guide_22_Partition_Keys","docId":"guides/Guide_22_Partition_Keys"},{"type":"link","label":"Batch producing","href":"/docs/0.6.0/guides/Guide_23_Batch_Producing","docId":"guides/Guide_23_Batch_Producing"},{"type":"link","label":"Lifespan Events","href":"/docs/0.6.0/guides/Guide_05_Lifespan_Handler","docId":"guides/Guide_05_Lifespan_Handler"},{"type":"link","label":"Encoding and Decoding Kafka Messages with FastKafka","href":"/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","docId":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Testing","items":[{"type":"link","label":"Using Redpanda to test FastKafka","href":"/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka","docId":"guides/Guide_31_Using_redpanda_to_test_fastkafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Documentation generation","items":[{"type":"link","label":"Deploy FastKafka docs to GitHub Pages","href":"/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow","docId":"guides/Guide_04_Github_Actions_Workflow"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Deployment","items":[{"type":"link","label":"Deploying FastKafka using Docker","href":"/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka","docId":"guides/Guide_30_Using_docker_to_deploy_fastkafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Benchmarking","items":[{"type":"link","label":"Benchmarking FastKafka 
app","href":"/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka","docId":"guides/Guide_06_Benchmarking_FastKafka"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"API","items":[{"type":"link","label":"EventMetadata","href":"/docs/0.6.0/api/fastkafka/EventMetadata","docId":"api/fastkafka/EventMetadata"},{"type":"link","label":"FastKafka","href":"/docs/0.6.0/api/fastkafka/","docId":"api/fastkafka/FastKafka"},{"type":"link","label":"KafkaEvent","href":"/docs/0.6.0/api/fastkafka/KafkaEvent","docId":"api/fastkafka/KafkaEvent"},{"type":"category","label":"encoder","items":[{"type":"link","label":"AvroBase","href":"/docs/0.6.0/api/fastkafka/encoder/AvroBase","docId":"api/fastkafka/encoder/AvroBase"},{"type":"link","label":"avro_decoder","href":"/docs/0.6.0/api/fastkafka/encoder/avro_decoder","docId":"api/fastkafka/encoder/avro_decoder"},{"type":"link","label":"avro_encoder","href":"/docs/0.6.0/api/fastkafka/encoder/avro_encoder","docId":"api/fastkafka/encoder/avro_encoder"},{"type":"link","label":"avsc_to_pydantic","href":"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic","docId":"api/fastkafka/encoder/avsc_to_pydantic"},{"type":"link","label":"json_decoder","href":"/docs/0.6.0/api/fastkafka/encoder/json_decoder","docId":"api/fastkafka/encoder/json_decoder"},{"type":"link","label":"json_encoder","href":"/docs/0.6.0/api/fastkafka/encoder/json_encoder","docId":"api/fastkafka/encoder/json_encoder"}],"collapsed":true,"collapsible":true},{"type":"category","label":"testing","items":[{"type":"link","label":"ApacheKafkaBroker","href":"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker","docId":"api/fastkafka/testing/ApacheKafkaBroker"},{"type":"link","label":"LocalRedpandaBroker","href":"/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker","docId":"api/fastkafka/testing/LocalRedpandaBroker"},{"type":"link","label":"Tester","href":"/docs/0.6.0/api/fastkafka/testing/Tester","docId":"api/fastkafka/testing/Tester"}],"collap
sed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"CLI","items":[{"type":"link","label":"fastkafka","href":"/docs/0.6.0/cli/fastkafka","docId":"cli/fastkafka"},{"type":"link","label":"run_fastkafka_server_process","href":"/docs/0.6.0/cli/run_fastkafka_server_process","docId":"cli/run_fastkafka_server_process"}],"collapsed":true,"collapsible":true},{"type":"link","label":"LICENSE","href":"/docs/0.6.0/LICENSE","docId":"LICENSE"},{"type":"link","label":"Contributing to fastkafka","href":"/docs/0.6.0/CONTRIBUTING","docId":"CONTRIBUTING"},{"type":"link","label":"Release notes","href":"/docs/0.6.0/CHANGELOG","docId":"CHANGELOG"}]},"docs":{"api/fastkafka/encoder/avro_decoder":{"id":"api/fastkafka/encoder/avro_decoder","title":"avro_decoder","description":"fastkafka.encoder.avrodecoder {fastkafka.encoder.avrodecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avro_encoder":{"id":"api/fastkafka/encoder/avro_encoder","title":"avro_encoder","description":"fastkafka.encoder.avroencoder {fastkafka.encoder.avroencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/AvroBase":{"id":"api/fastkafka/encoder/AvroBase","title":"AvroBase","description":"fastkafka.encoder.AvroBase {fastkafka.encoder.AvroBase}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avsc_to_pydantic":{"id":"api/fastkafka/encoder/avsc_to_pydantic","title":"avsc_to_pydantic","description":"fastkafka.encoder.avsctopydantic {fastkafka.encoder.avsctopydantic}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_decoder":{"id":"api/fastkafka/encoder/json_decoder","title":"json_decoder","description":"fastkafka.encoder.jsondecoder {fastkafka.encoder.jsondecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_encoder":{"id":"api/fastkafka/encoder/json_encoder","title":"json_encoder","description":"fastkafka.encoder.jsonencoder 
{fastkafka.encoder.jsonencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/EventMetadata":{"id":"api/fastkafka/EventMetadata","title":"EventMetadata","description":"fastkafka.EventMetadata {fastkafka.EventMetadata}","sidebar":"tutorialSidebar"},"api/fastkafka/executors/DynamicTaskExecutor":{"id":"api/fastkafka/executors/DynamicTaskExecutor","title":"DynamicTaskExecutor","description":"fastkafka.executors.DynamicTaskExecutor {fastkafka.executors.DynamicTaskExecutor}"},"api/fastkafka/executors/SequentialExecutor":{"id":"api/fastkafka/executors/SequentialExecutor","title":"SequentialExecutor","description":"fastkafka.executors.SequentialExecutor {fastkafka.executors.SequentialExecutor}"},"api/fastkafka/FastKafka":{"id":"api/fastkafka/FastKafka","title":"FastKafka","description":"fastkafka.FastKafka {fastkafka.FastKafka}","sidebar":"tutorialSidebar"},"api/fastkafka/KafkaEvent":{"id":"api/fastkafka/KafkaEvent","title":"KafkaEvent","description":"fastkafka.KafkaEvent {fastkafka.KafkaEvent}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/ApacheKafkaBroker":{"id":"api/fastkafka/testing/ApacheKafkaBroker","title":"ApacheKafkaBroker","description":"fastkafka.testing.ApacheKafkaBroker {fastkafka.testing.ApacheKafkaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/LocalRedpandaBroker":{"id":"api/fastkafka/testing/LocalRedpandaBroker","title":"LocalRedpandaBroker","description":"fastkafka.testing.LocalRedpandaBroker {fastkafka.testing.LocalRedpandaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/Tester":{"id":"api/fastkafka/testing/Tester","title":"Tester","description":"fastkafka.testing.Tester {fastkafka.testing.Tester}","sidebar":"tutorialSidebar"},"CHANGELOG":{"id":"CHANGELOG","title":"Release 
notes","description":"0.6.0","sidebar":"tutorialSidebar"},"cli/fastkafka":{"id":"cli/fastkafka","title":"fastkafka","description":"Usage:","sidebar":"tutorialSidebar"},"cli/run_fastkafka_server_process":{"id":"cli/run_fastkafka_server_process","title":"run_fastkafka_server_process","description":"Usage:","sidebar":"tutorialSidebar"},"CONTRIBUTING":{"id":"CONTRIBUTING","title":"Contributing to fastkafka","description":"First off, thanks for taking the time to contribute! \u2764\ufe0f","sidebar":"tutorialSidebar"},"guides/Guide_00_FastKafka_Demo":{"id":"guides/Guide_00_FastKafka_Demo","title":"FastKafka tutorial","description":"FastKafka is a powerful and easy-to-use"},"guides/Guide_01_Intro":{"id":"guides/Guide_01_Intro","title":"Intro","description":"This tutorial will show you how to use FastKafkaAPI, step by"},"guides/Guide_02_First_Steps":{"id":"guides/Guide_02_First_Steps","title":"First Steps","description":"Creating a simple Kafka consumer app"},"guides/Guide_03_Authentication":{"id":"guides/Guide_03_Authentication","title":"Authentication","description":"TLS Authentication"},"guides/Guide_04_Github_Actions_Workflow":{"id":"guides/Guide_04_Github_Actions_Workflow","title":"Deploy FastKafka docs to GitHub Pages","description":"Getting started","sidebar":"tutorialSidebar"},"guides/Guide_05_Lifespan_Handler":{"id":"guides/Guide_05_Lifespan_Handler","title":"Lifespan Events","description":"Did you know that you can define some special code that runs before and","sidebar":"tutorialSidebar"},"guides/Guide_06_Benchmarking_FastKafka":{"id":"guides/Guide_06_Benchmarking_FastKafka","title":"Benchmarking FastKafka app","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka":{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","title":"Encoding and Decoding Kafka Messages with 
FastKafka","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_11_Consumes_Basics":{"id":"guides/Guide_11_Consumes_Basics","title":"@consumes basics","description":"You can use @consumes decorator to consume messages from Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_21_Produces_Basics":{"id":"guides/Guide_21_Produces_Basics","title":"@produces basics","description":"You can use @produces decorator to produce messages to Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_22_Partition_Keys":{"id":"guides/Guide_22_Partition_Keys","title":"Defining a partition key","description":"Partition keys are used in Apache Kafka to determine which partition a","sidebar":"tutorialSidebar"},"guides/Guide_23_Batch_Producing":{"id":"guides/Guide_23_Batch_Producing","title":"Batch producing","description":"If you want to send your data in batches @produces decorator makes","sidebar":"tutorialSidebar"},"guides/Guide_30_Using_docker_to_deploy_fastkafka":{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","title":"Deploying FastKafka using Docker","description":"Building a Docker Image","sidebar":"tutorialSidebar"},"guides/Guide_31_Using_redpanda_to_test_fastkafka":{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","title":"Using Redpanda to test FastKafka","description":"What is FastKafka?","sidebar":"tutorialSidebar"},"index":{"id":"index","title":"FastKafka","description":"Effortless Kafka integration for your web services","sidebar":"tutorialSidebar"},"LICENSE":{"id":"LICENSE","title":"LICENSE","description":"Apache License","sidebar":"tutorialSidebar"}}}')}}]); \ No newline at end of file diff --git a/assets/js/5a11a8c6.882f537d.js b/assets/js/5a11a8c6.882f537d.js new file mode 100644 index 0000000..6672ba1 --- /dev/null +++ b/assets/js/5a11a8c6.882f537d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5746],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function 
a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?o(Object(r),!0).forEach((function(t){a(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):o(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function l(e,t){if(null==e)return{};var r,n,a=function(e,t){if(null==e)return{};var r,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var u=n.createContext({}),s=function(e){var t=n.useContext(u),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},c=function(e){var t=s(e.components);return n.createElement(u.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,u=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),p=s(r),k=a,m=p["".concat(u,".").concat(k)]||p[k]||f[k]||o;return r?n.createElement(m,i(i({ref:t},c),{},{components:r})):n.createElement(m,i({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=k;var l={};for(var u in t)hasOwnProperty.call(t,u)&&(l[u]=t[u]);l.originalType=e,l[p]="string"==typeof e?e:a,i[1]=l;for(var s=2;s<o;s++)i[s]=r[s];return n.createElement.apply(null,i)}return 
n.createElement.apply(null,r)}k.displayName="MDXCreateElement"},4321:(e,t,r)=>{r.r(t),r.d(t,{assets:()=>u,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>l,toc:()=>s});var n=r(7462),a=(r(7294),r(3905));const o={},i=void 0,l={unversionedId:"api/fastkafka/executors/SequentialExecutor",id:"version-0.7.0/api/fastkafka/executors/SequentialExecutor",title:"SequentialExecutor",description:"fastkafka.executors.SequentialExecutor {fastkafka.executors.SequentialExecutor}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/executors/SequentialExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/SequentialExecutor",permalink:"/docs/0.7.0/api/fastkafka/executors/SequentialExecutor",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"DynamicTaskExecutor",permalink:"/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor"},next:{title:"ApacheKafkaBroker",permalink:"/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker"}},u={},s=[{value:"<code>fastkafka.executors.SequentialExecutor</code>",id:"fastkafka.executors.SequentialExecutor",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>run</code>",id:"run",level:3}],c={toc:s},p="wrapper";function f(e){let{components:t,...r}=e;return(0,a.kt)(p,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.executors.SequentialExecutor"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.executors.SequentialExecutor")),(0,a.kt)("p",null,"A class that implements a sequential executor for processing consumer records."),(0,a.kt)("p",null,"The SequentialExecutor class extends the StreamExecutor class and provides functionality\nfor running processing tasks in sequence by awaiting their coroutines."),(0,a.kt)("h3",{id:"init"},(0,a.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000) 
-> None")),(0,a.kt)("p",null,"Create an instance of SequentialExecutor"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"throw_exceptions"),": Flag indicating whether exceptions should be thrown or logged.\nDefaults to False."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"max_buffer_size"),": Maximum buffer size for the memory object stream.\nDefaults to 100_000.")),(0,a.kt)("h3",{id:"run"},(0,a.kt)("inlineCode",{parentName:"h3"},"run")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None")),(0,a.kt)("p",null,"Runs the sequential executor."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"is_shutting_down_f"),": Function to check if the executor is shutting down."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"generator"),": Generator function for retrieving consumer records."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"processor"),": Processor function for processing consumer records.")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/5cf0f698.9f573412.js b/assets/js/5cf0f698.9f573412.js new file mode 100644 index 0000000..a12a51c --- /dev/null +++ b/assets/js/5cf0f698.9f573412.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4884],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>f});var n=a(7294);function r(e,t,a){return t in 
e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},k="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),k=l(a),d=r,f=k["".concat(p,".").concat(d)]||k[d]||c[d]||o;return a?n.createElement(f,i(i({ref:t},u),{},{components:a})):n.createElement(f,i({ref:t},u))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=d;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[k]="string"==typeof e?e:r,i[1]=s;for(var l=2;l<o;l++)i[l]=a[l];return n.createElement.apply(null,i)}return 
n.createElement.apply(null,a)}d.displayName="MDXCreateElement"},2950:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>c,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var n=a(7462),r=(a(7294),a(3905));const o={},i="Defining a partition key",s={unversionedId:"guides/Guide_22_Partition_Keys",id:"version-0.6.0/guides/Guide_22_Partition_Keys",title:"Defining a partition key",description:"Partition keys are used in Apache Kafka to determine which partition a",source:"@site/versioned_docs/version-0.6.0/guides/Guide_22_Partition_Keys.md",sourceDirName:"guides",slug:"/guides/Guide_22_Partition_Keys",permalink:"/docs/0.6.0/guides/Guide_22_Partition_Keys",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@produces basics",permalink:"/docs/0.6.0/guides/Guide_21_Produces_Basics"},next:{title:"Batch producing",permalink:"/docs/0.6.0/guides/Guide_23_Batch_Producing"}},p={},l=[{value:"Return a key from the producing function",id:"return-a-key-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic with the desired key",id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key",level:2}],u={toc:l},k="wrapper";function c(e){let{components:t,...a}=e;return(0,r.kt)(k,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"defining-a-partition-key"},"Defining a partition key"),(0,r.kt)("p",null,"Partition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. 
Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance."),(0,r.kt)("p",null,"You can define your partition keys when using the ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator,\nthis guide will demonstrate to you this feature."),(0,r.kt)("h2",{id:"return-a-key-from-the-producing-function"},"Return a key from the producing function"),(0,r.kt)("p",null,"To define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass and set the key value. Check the example below:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n')),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class with\nthe key defined as ",(0,r.kt)("em",{parentName:"p"},"my_key"),". So, we wrap the message and key into a\nKafkaEvent class and return it as such."),(0,r.kt)("p",null,"While generating the documentation, the\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass will be unwrapped and the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class will be documented in\nthe definition of message type, same way if you didn\u2019t use the key."),(0,r.kt)("p",null,'!!! 
info "Which key to choose?"'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n")),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("strong",{parentName:"p"},"@producer basics")," guide to return\nthe ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," with our key. The final app will look like this (make\nsure you replace the ",(0,r.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key"},"Check if the message was sent to the Kafka topic with the desired key"),(0,r.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic with the defined key. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the ",(0,r.kt)("em",{parentName:"p"},'my_key {\u201cmsg": \u201cHello world!"}')," messages in your\ntopic appearing, the ",(0,r.kt)("em",{parentName:"p"},"my_key")," part of the message is the key that we\ndefined in our producing function."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/61386b8d.6fd0979f.js b/assets/js/61386b8d.6fd0979f.js new file mode 100644 index 0000000..b665376 --- /dev/null +++ b/assets/js/61386b8d.6fd0979f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5375],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>d});var r=a(7294);function n(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,r)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){n(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,r,n=function(e,t){if(null==e)return{};var a,r,n={},o=Object.keys(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||(n[a]=e[a]);return n}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(n[a]=e[a])}return n}var 
s=r.createContext({}),p=function(e){var t=r.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},k=function(e){var t=p(e.components);return r.createElement(s.Provider,{value:t},e.children)},c="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var a=e.components,n=e.mdxType,o=e.originalType,s=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(a),u=n,d=c["".concat(s,".").concat(u)]||c[u]||f[u]||o;return a?r.createElement(d,i(i({ref:t},k),{},{components:a})):r.createElement(d,i({ref:t},k))}));function d(e,t){var a=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var o=a.length,i=new Array(o);i[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:n,i[1]=l;for(var p=2;p<o;p++)i[p]=a[p];return r.createElement.apply(null,i)}return r.createElement.apply(null,a)}u.displayName="MDXCreateElement"},7530:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var r=a(7462),n=(a(7294),a(3905));const o={},i=void 0,l={unversionedId:"api/fastkafka/testing/ApacheKafkaBroker",id:"version-0.5.0/api/fastkafka/testing/ApacheKafkaBroker",title:"ApacheKafkaBroker",description:"fastkafka.testing.ApacheKafkaBroker 
{fastkafka.testing.ApacheKafkaBroker}",source:"@site/versioned_docs/version-0.5.0/api/fastkafka/testing/ApacheKafkaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/ApacheKafkaBroker",permalink:"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"KafkaEvent",permalink:"/docs/0.5.0/api/fastkafka/KafkaEvent"},next:{title:"LocalRedpandaBroker",permalink:"/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker"}},s={},p=[{value:"<code>fastkafka.testing.ApacheKafkaBroker</code>",id:"fastkafka.testing.ApacheKafkaBroker",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>start</code>",id:"start",level:3},{value:"<code>stop</code>",id:"stop",level:3}],k={toc:p},c="wrapper";function f(e){let{components:t,...a}=e;return(0,n.kt)(c,(0,r.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.testing.ApacheKafkaBroker"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.ApacheKafkaBroker")),(0,n.kt)("p",null,"ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing."),(0,n.kt)("h3",{id:"init"},(0,n.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None")),(0,n.kt)("p",null,"Initialises the ApacheKafkaBroker object"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to 
connect"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,n.kt)("h3",{id:"start"},(0,n.kt)("inlineCode",{parentName:"h3"},"start")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def start(self: fastkafka.testing.ApacheKafkaBroker) -> str")),(0,n.kt)("p",null,"Starts a local kafka broker and zookeeper instance synchronously"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"Kafka broker bootstrap server address in string format: add:port")),(0,n.kt)("h3",{id:"stop"},(0,n.kt)("inlineCode",{parentName:"h3"},"stop")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None")),(0,n.kt)("p",null,"Stops a local kafka broker and zookeeper instance synchronously"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/62ff7ec9.ec600e31.js b/assets/js/62ff7ec9.ec600e31.js new file mode 100644 index 0000000..cd58c8e --- /dev/null +++ b/assets/js/62ff7ec9.ec600e31.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5625],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>f});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function i(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function r(e){for(var a=1;a<arguments.length;a++){var 
n=null!=arguments[a]?arguments[a]:{};a%2?i(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):i(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function s(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var n,t,o={},i=Object.keys(e);for(t=0;t<i.length;t++)n=i[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t<i.length;t++)n=i[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=t.createContext({}),l=function(e){var a=t.useContext(p),n=a;return e&&(n="function"==typeof e?e(a):r(r({},a),e)),n},c=function(e){var a=l(e.components);return t.createElement(p.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},m=t.forwardRef((function(e,a){var n=e.components,o=e.mdxType,i=e.originalType,p=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=l(n),m=o,f=d["".concat(p,".").concat(m)]||d[m]||u[m]||i;return n?t.createElement(f,r(r({ref:a},c),{},{components:n})):t.createElement(f,r({ref:a},c))}));function f(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var i=n.length,r=new Array(i);r[0]=m;var s={};for(var p in a)hasOwnProperty.call(a,p)&&(s[p]=a[p]);s.originalType=e,s[d]="string"==typeof e?e:o,r[1]=s;for(var l=2;l<i;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}m.displayName="MDXCreateElement"},5233:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>u,frontMatter:()=>i,metadata:()=>s,toc:()=>l});var t=n(7462),o=(n(7294),n(3905));const i={},r="Lifespan Events",s={unversionedId:"guides/Guide_05_Lifespan_Handler",id:"version-0.6.0/guides/Guide_05_Lifespan_Handler",title:"Lifespan Events",description:"Did 
you know that you can define some special code that runs before and",source:"@site/versioned_docs/version-0.6.0/guides/Guide_05_Lifespan_Handler.md",sourceDirName:"guides",slug:"/guides/Guide_05_Lifespan_Handler",permalink:"/docs/0.6.0/guides/Guide_05_Lifespan_Handler",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Batch producing",permalink:"/docs/0.6.0/guides/Guide_23_Batch_Producing"},next:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"}},p={},l=[{value:"Lifespan example - Iris prediction model",id:"lifespan-example---iris-prediction-model",level:2},{value:"Lifespan",id:"lifespan",level:3},{value:"Async context manager",id:"async-context-manager",level:3},{value:"App demo",id:"app-demo",level:2},{value:"FastKafka app",id:"fastkafka-app",level:3},{value:"Data modeling",id:"data-modeling",level:3},{value:"Consumers and producers",id:"consumers-and-producers",level:3},{value:"Final app",id:"final-app",level:3},{value:"Running the app",id:"running-the-app",level:3},{value:"Recap",id:"recap",level:2}],c={toc:l},d="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(d,(0,t.Z)({},c,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"lifespan-events"},"Lifespan Events"),(0,o.kt)("p",null,"Did you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! \ud83d\ude80"),(0,o.kt)("p",null,"Lets break it down:"),(0,o.kt)("p",null,"You can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages."),(0,o.kt)("p",null,"Similarly, you can define logic (code) that should be executed when the\napplication is shutting down. 
This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up."),(0,o.kt)("p",null,"By executing code before consuming and after producing, you cover the\nentire lifecycle of your application \ud83c\udf89"),(0,o.kt)("p",null,"This is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!"),(0,o.kt)("p",null,"So lets give it a try and see how it can make your Kafka app even more\nawesome! \ud83d\udcaa"),(0,o.kt)("h2",{id:"lifespan-example---iris-prediction-model"},"Lifespan example - Iris prediction model"),(0,o.kt)("p",null,"Let\u2019s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don\u2019t want to load them for every\nmessage."),(0,o.kt)("p",null,"Here\u2019s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. 
In the upcoming sections, we\u2019ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication."),(0,o.kt)("h3",{id:"lifespan"},"Lifespan"),(0,o.kt)("p",null,"You can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager."),(0,o.kt)("p",null,"Let\u2019s start with an example and then see it in detail."),(0,o.kt)("p",null,"We create an async function lifespan() with yield like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \n')),(0,o.kt)("p",null,"The first thing to notice, is that we are defining an async function\nwith ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),". This is very similar to Dependencies with ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,o.kt)("p",null,"The first part of the function, before the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),", will be executed\n",(0,o.kt)("strong",{parentName:"p"},"before")," the application starts. 
And the part after the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield")," will\nbe executed ",(0,o.kt)("strong",{parentName:"p"},"after")," the application has finished."),(0,o.kt)("p",null,"This lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown."),(0,o.kt)("p",null,"The lifespan will be passed an KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup."),(0,o.kt)("p",null,"For demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called."),(0,o.kt)("h3",{id:"async-context-manager"},"Async context manager"),(0,o.kt)("p",null,"Context managers can be used in ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," blocks, our lifespan, for example\ncould be used like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"ml_models = {}\nasync with lifespan(None):\n print(ml_models)\n")),(0,o.kt)("p",null,"When you create a context manager or an async context manager, what it\ndoes is that, before entering the ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute the code\nbefore the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),", and after exiting the ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute\nthe code after the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,o.kt)("p",null,"If you want to learn more about context managers and contextlib\ndecorators, please visit ",(0,o.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/contextlib.html"},"Python official\ndocs")),(0,o.kt)("h2",{id:"app-demo"},"App demo"),(0,o.kt)("h3",{id:"fastkafka-app"},"FastKafka app"),(0,o.kt)("p",null,"Lets now create our application using the created lifespan handler."),(0,o.kt)("p",null,"Notice how we passed our lifespan handler to the app when constructing\nit trough the 
",(0,o.kt)("inlineCode",{parentName:"p"},"lifespan")," argument."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local development kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n')),(0,o.kt)("h3",{id:"data-modeling"},"Data modeling"),(0,o.kt)("p",null,"Lets model the Iris data for our app:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"consumers-and-producers"},"Consumers and producers"),(0,o.kt)("p",null,"Lets create a consumer and producer for our app that will generate\npredictions from input iris data."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", 
"versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h3",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"The final app looks like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \nkafka_brokers = {\n "localhost": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local development kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, 
msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h3",{id:"running-the-app"},"Running the app"),(0,o.kt)("p",null,"Now we can run the app with your custom lifespan handler. Copy the code\nabove in lifespan_example.py and run it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n")),(0,o.kt)("p",null,"When you run the app, you should see a simmilar output to the one below:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[262292]: Loading the model!\n[262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer started.\n[262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished\n[262292]: Exiting, clearing model dict!\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.\n")),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have defined a lifespan handler and passed to our\nFastKafka app."),(0,o.kt)("p",null,"Some important points are:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Lifespan handler is implemented 
as\n",(0,o.kt)("a",{parentName:"li",href:"https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager"},"AsyncContextManager")),(0,o.kt)("li",{parentName:"ol"},"Code ",(0,o.kt)("strong",{parentName:"li"},"before")," yield in lifespan will be executed ",(0,o.kt)("strong",{parentName:"li"},"before"),"\napplication ",(0,o.kt)("strong",{parentName:"li"},"startup")),(0,o.kt)("li",{parentName:"ol"},"Code ",(0,o.kt)("strong",{parentName:"li"},"after")," yield in lifespan will be executed ",(0,o.kt)("strong",{parentName:"li"},"after"),"\napplication ",(0,o.kt)("strong",{parentName:"li"},"shutdown")),(0,o.kt)("li",{parentName:"ol"},"You can pass your lifespan handler to FastKafka app on\ninitialisation by passing a ",(0,o.kt)("inlineCode",{parentName:"li"},"lifespan")," argument")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/647303d6.98160b7c.js b/assets/js/647303d6.98160b7c.js new file mode 100644 index 0000000..4b282c6 --- /dev/null +++ b/assets/js/647303d6.98160b7c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[554],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>f});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function i(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function r(e){for(var a=1;a<arguments.length;a++){var n=null!=arguments[a]?arguments[a]:{};a%2?i(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):i(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function s(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var 
n,t,o={},i=Object.keys(e);for(t=0;t<i.length;t++)n=i[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t<i.length;t++)n=i[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=t.createContext({}),l=function(e){var a=t.useContext(p),n=a;return e&&(n="function"==typeof e?e(a):r(r({},a),e)),n},c=function(e){var a=l(e.components);return t.createElement(p.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},m=t.forwardRef((function(e,a){var n=e.components,o=e.mdxType,i=e.originalType,p=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=l(n),m=o,f=d["".concat(p,".").concat(m)]||d[m]||u[m]||i;return n?t.createElement(f,r(r({ref:a},c),{},{components:n})):t.createElement(f,r({ref:a},c))}));function f(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var i=n.length,r=new Array(i);r[0]=m;var s={};for(var p in a)hasOwnProperty.call(a,p)&&(s[p]=a[p]);s.originalType=e,s[d]="string"==typeof e?e:o,r[1]=s;for(var l=2;l<i;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}m.displayName="MDXCreateElement"},8292:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>u,frontMatter:()=>i,metadata:()=>s,toc:()=>l});var t=n(7462),o=(n(7294),n(3905));const i={},r="Lifespan Events",s={unversionedId:"guides/Guide_05_Lifespan_Handler",id:"version-0.7.1/guides/Guide_05_Lifespan_Handler",title:"Lifespan Events",description:"Did you know that you can define some special code that runs before and",source:"@site/versioned_docs/version-0.7.1/guides/Guide_05_Lifespan_Handler.md",sourceDirName:"guides",slug:"/guides/Guide_05_Lifespan_Handler",permalink:"/docs/0.7.1/guides/Guide_05_Lifespan_Handler",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Batch 
producing",permalink:"/docs/0.7.1/guides/Guide_23_Batch_Producing"},next:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"}},p={},l=[{value:"Lifespan example - Iris prediction model",id:"lifespan-example---iris-prediction-model",level:2},{value:"Lifespan",id:"lifespan",level:3},{value:"Async context manager",id:"async-context-manager",level:3},{value:"App demo",id:"app-demo",level:2},{value:"FastKafka app",id:"fastkafka-app",level:3},{value:"Data modeling",id:"data-modeling",level:3},{value:"Consumers and producers",id:"consumers-and-producers",level:3},{value:"Final app",id:"final-app",level:3},{value:"Running the app",id:"running-the-app",level:3},{value:"Recap",id:"recap",level:2}],c={toc:l},d="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(d,(0,t.Z)({},c,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"lifespan-events"},"Lifespan Events"),(0,o.kt)("p",null,"Did you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! \ud83d\ude80"),(0,o.kt)("p",null,"Lets break it down:"),(0,o.kt)("p",null,"You can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages."),(0,o.kt)("p",null,"Similarly, you can define logic (code) that should be executed when the\napplication is shutting down. This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up."),(0,o.kt)("p",null,"By executing code before consuming and after producing, you cover the\nentire lifecycle of your application \ud83c\udf89"),(0,o.kt)("p",null,"This is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? 
You can clean up these\nresources when the app is shutting down!"),(0,o.kt)("p",null,"So lets give it a try and see how it can make your Kafka app even more\nawesome! \ud83d\udcaa"),(0,o.kt)("h2",{id:"lifespan-example---iris-prediction-model"},"Lifespan example - Iris prediction model"),(0,o.kt)("p",null,"Let\u2019s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don\u2019t want to load them for every\nmessage."),(0,o.kt)("p",null,"Here\u2019s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. In the upcoming sections, we\u2019ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication."),(0,o.kt)("h3",{id:"lifespan"},"Lifespan"),(0,o.kt)("p",null,"You can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager."),(0,o.kt)("p",null,"Let\u2019s start with an example and then see it in detail."),(0,o.kt)("p",null,"We create an async function lifespan() with yield like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, 
max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \n')),(0,o.kt)("p",null,"The first thing to notice, is that we are defining an async function\nwith ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),". This is very similar to Dependencies with ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,o.kt)("p",null,"The first part of the function, before the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),", will be executed\n",(0,o.kt)("strong",{parentName:"p"},"before")," the application starts. And the part after the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield")," will\nbe executed ",(0,o.kt)("strong",{parentName:"p"},"after")," the application has finished."),(0,o.kt)("p",null,"This lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown."),(0,o.kt)("p",null,"The lifespan will be passed an KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup."),(0,o.kt)("p",null,"For demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called."),(0,o.kt)("h3",{id:"async-context-manager"},"Async context manager"),(0,o.kt)("p",null,"Context managers can be used in ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," blocks, our lifespan, for example\ncould be used like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"ml_models = {}\nasync with lifespan(None):\n print(ml_models)\n")),(0,o.kt)("p",null,"When you create a context manager or an async context manager, what it\ndoes is that, before entering the ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute the code\nbefore the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),", and after exiting the ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute\nthe code after 
the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,o.kt)("p",null,"If you want to learn more about context managers and contextlib\ndecorators, please visit ",(0,o.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/contextlib.html"},"Python official\ndocs")),(0,o.kt)("h2",{id:"app-demo"},"App demo"),(0,o.kt)("h3",{id:"fastkafka-app"},"FastKafka app"),(0,o.kt)("p",null,"Lets now create our application using the created lifespan handler."),(0,o.kt)("p",null,"Notice how we passed our lifespan handler to the app when constructing\nit trough the ",(0,o.kt)("inlineCode",{parentName:"p"},"lifespan")," argument."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local development kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n')),(0,o.kt)("h3",{id:"data-modeling"},"Data modeling"),(0,o.kt)("p",null,"Lets model the Iris data for our app:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"consumers-and-producers"},"Consumers and producers"),(0,o.kt)("p",null,"Lets create a consumer and producer for 
our app that will generate\npredictions from input iris data."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h3",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"The final app looks like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model 
dict!")\n ml_models.clear()\n \nkafka_brokers = {\n "localhost": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local development kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h3",{id:"running-the-app"},"Running the app"),(0,o.kt)("p",null,"Now we can run the app with your custom lifespan handler. 
Copy the code\nabove in lifespan_example.py and run it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n")),(0,o.kt)("p",null,"When you run the app, you should see a simmilar output to the one below:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[262292]: Loading the model!\n[262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[262292]: 
[INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished\n[262292]: Exiting, clearing model dict!\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.\n")),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have defined a lifespan handler and passed to our\nFastKafka app."),(0,o.kt)("p",null,"Some important points are:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Lifespan handler is implemented as\n",(0,o.kt)("a",{parentName:"li",href:"https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager"},"AsyncContextManager")),(0,o.kt)("li",{parentName:"ol"},"Code ",(0,o.kt)("strong",{parentName:"li"},"before")," yield in lifespan will be executed ",(0,o.kt)("strong",{parentName:"li"},"before"),"\napplication ",(0,o.kt)("strong",{parentName:"li"},"startup")),(0,o.kt)("li",{parentName:"ol"},"Code ",(0,o.kt)("strong",{parentName:"li"},"after")," yield in lifespan will be executed ",(0,o.kt)("strong",{parentName:"li"},"after"),"\napplication 
",(0,o.kt)("strong",{parentName:"li"},"shutdown")),(0,o.kt)("li",{parentName:"ol"},"You can pass your lifespan handler to FastKafka app on\ninitialisation by passing a ",(0,o.kt)("inlineCode",{parentName:"li"},"lifespan")," argument")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/65ab9689.c83f172b.js b/assets/js/65ab9689.c83f172b.js new file mode 100644 index 0000000..f1c83f3 --- /dev/null +++ b/assets/js/65ab9689.c83f172b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7786],{3905:(e,n,a)=>{a.d(n,{Zo:()=>l,kt:()=>f});var t=a(7294);function i(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function s(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function o(e){for(var n=1;n<arguments.length;n++){var a=null!=arguments[n]?arguments[n]:{};n%2?s(Object(a),!0).forEach((function(n){i(e,n,a[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):s(Object(a)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(a,n))}))}return e}function r(e,n){if(null==e)return{};var a,t,i=function(e,n){if(null==e)return{};var a,t,i={},s=Object.keys(e);for(t=0;t<s.length;t++)a=s[t],n.indexOf(a)>=0||(i[a]=e[a]);return i}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(t=0;t<s.length;t++)a=s[t],n.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var d=t.createContext({}),c=function(e){var n=t.useContext(d),a=n;return e&&(a="function"==typeof e?e(n):o(o({},n),e)),a},l=function(e){var n=c(e.components);return t.createElement(d.Provider,{value:n},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return 
t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,i=e.mdxType,s=e.originalType,d=e.parentName,l=r(e,["components","mdxType","originalType","parentName"]),p=c(a),u=i,f=p["".concat(d,".").concat(u)]||p[u]||m[u]||s;return a?t.createElement(f,o(o({ref:n},l),{},{components:a})):t.createElement(f,o({ref:n},l))}));function f(e,n){var a=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var s=a.length,o=new Array(s);o[0]=u;var r={};for(var d in n)hasOwnProperty.call(n,d)&&(r[d]=n[d]);r.originalType=e,r[p]="string"==typeof e?e:i,o[1]=r;for(var c=2;c<s;c++)o[c]=a[c];return t.createElement.apply(null,o)}return t.createElement.apply(null,a)}u.displayName="MDXCreateElement"},4744:(e,n,a)=>{a.r(n),a.d(n,{assets:()=>d,contentTitle:()=>o,default:()=>m,frontMatter:()=>s,metadata:()=>r,toc:()=>c});var t=a(7462),i=(a(7294),a(3905));const s={},o="Encoding and Decoding Kafka Messages with FastKafka",r={unversionedId:"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",id:"version-0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",title:"Encoding and Decoding Kafka Messages with FastKafka",description:"Prerequisites",source:"@site/versioned_docs/version-0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",permalink:"/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Lifespan Events",permalink:"/docs/0.7.1/guides/Guide_05_Lifespan_Handler"},next:{title:"Using multiple Kafka clusters",permalink:"/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters"}},d={},c=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Ways to Encode and Decode Messages with FastKafka",id:"ways-to-encode-and-decode-messages-with-fastkafka",level:2},{value:"1. 
Json encoder and decoder",id:"1-json-encoder-and-decoder",level:2},{value:"2. Avro encoder and decoder",id:"2-avro-encoder-and-decoder",level:2},{value:"What is Avro?",id:"what-is-avro",level:3},{value:"Installing FastKafka with Avro dependencies",id:"installing-fastkafka-with-avro-dependencies",level:3},{value:"Defining Avro Schema Using Pydantic Models",id:"defining-avro-schema-using-pydantic-models",level:3},{value:"Reusing existing avro schema",id:"reusing-existing-avro-schema",level:3},{value:"Building pydantic models from avro schema dictionary",id:"building-pydantic-models-from-avro-schema-dictionary",level:4},{value:"Building pydantic models from <code>.avsc</code> file",id:"building-pydantic-models-from-avsc-file",level:4},{value:"Consume/Produce avro messages with FastKafka",id:"consumeproduce-avro-messages-with-fastkafka",level:3},{value:"Assembling it all together",id:"assembling-it-all-together",level:3},{value:"3. Custom encoder and decoder",id:"3-custom-encoder-and-decoder",level:2},{value:"Writing a custom encoder and decoder",id:"writing-a-custom-encoder-and-decoder",level:3},{value:"Assembling it all together",id:"assembling-it-all-together-1",level:3}],l={toc:c},p="wrapper";function m(e){let{components:n,...a}=e;return(0,i.kt)(p,(0,t.Z)({},l,a,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"encoding-and-decoding-kafka-messages-with-fastkafka"},"Encoding and Decoding Kafka Messages with FastKafka"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A basic knowledge of\n",(0,i.kt)("a",{parentName:"li",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nis needed to proceed with this guide. 
If you are not familiar with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),",\nplease go through the ",(0,i.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("a",{parentName:"li",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith its dependencies installed is needed. Please install\n",(0,i.kt)("a",{parentName:"li",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nusing the command - ",(0,i.kt)("inlineCode",{parentName:"li"},"pip install fastkafka"))),(0,i.kt)("h2",{id:"ways-to-encode-and-decode-messages-with-fastkafka"},"Ways to Encode and Decode Messages with FastKafka"),(0,i.kt)("p",null,"In python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. 
imilarly, while consuming messages, we consume them as bytes and\nthen convert them to strings."),(0,i.kt)("p",null,"In FastKafka, we specify message schema using Pydantic models as\nmentioned in ",(0,i.kt)("a",{parentName:"p",href:"/docs#messages"},"tutorial"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("p",null,"Then, we send and receive messages as instances of Pydantic models which\nwe defined. So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods of FastKafka accept a parameter\ncalled ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"json - This is the default encoder/decoder option in FastKafka.\nWhile producing, this option converts our instance of Pydantic model\nmessages to a JSON string and then converts it to bytes before\nsending it to the topic. 
While consuming, it converts bytes to a\nJSON string and then constructs an instance of Pydantic model from\nthe JSON string."),(0,i.kt)("li",{parentName:"ol"},"avro - This option uses Avro encoding/decoding to convert instances\nof Pydantic model messages to bytes while producing, and while\nconsuming, it constructs an instance of Pydantic model from bytes."),(0,i.kt)("li",{parentName:"ol"},"custom encoder/decoder - If you are not happy with the json or avro\nencoder/decoder options, you can write your own encoder/decoder\nfunctions and use them to encode/decode Pydantic messages.")),(0,i.kt)("h2",{id:"1-json-encoder-and-decoder"},"1. Json encoder and decoder"),(0,i.kt)("p",null,"The default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string."),(0,i.kt)("p",null,"We can use the application from ",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let\u2019s modify it to explicitly accept the \u2018json\u2019 encoder/decoder\nparameter:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="json")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, 
msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="json")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"In the above code, the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),' decorator sets up a\nconsumer for the \u201cinput_data" topic, using the \u2018json\u2019 decoder to convert\nthe message payload to an instance of ',(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData"),". The\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' decorator sets up a producer for the \u201cpredictions"\ntopic, using the \u2018json\u2019 encoder to convert the instance of\n',(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," to message payload."),(0,i.kt)("h2",{id:"2-avro-encoder-and-decoder"},"2. Avro encoder and decoder"),(0,i.kt)("h3",{id:"what-is-avro"},"What is Avro?"),(0,i.kt)("p",null,"Avro is a row-oriented remote procedure call and data serialization\nframework developed within Apache\u2019s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n",(0,i.kt)("a",{parentName:"p",href:"https://avro.apache.org/docs/"},"docs"),"."),(0,i.kt)("h3",{id:"installing-fastkafka-with-avro-dependencies"},"Installing FastKafka with Avro dependencies"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith dependencies for Apache Avro installed is needed to use avro\nencoder/decoder. 
Please install\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith Avro support using the command - ",(0,i.kt)("inlineCode",{parentName:"p"},"pip install fastkafka[avro]")),(0,i.kt)("h3",{id:"defining-avro-schema-using-pydantic-models"},"Defining Avro Schema Using Pydantic Models"),(0,i.kt)("p",null,"By default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models."),(0,i.kt)("p",null,"So, similar to the ",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),", the message schema will\nremain as it is."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("p",null,"No need to change anything to support avro. 
You can use existing\nPydantic models as is."),(0,i.kt)("h3",{id:"reusing-existing-avro-schema"},"Reusing existing avro schema"),(0,i.kt)("p",null,"If you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined."),(0,i.kt)("h4",{id:"building-pydantic-models-from-avro-schema-dictionary"},"Building pydantic models from avro schema dictionary"),(0,i.kt)("p",null,"Let\u2019s modify the above example and let\u2019s assume we have schemas already\nfor ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," which will look like below:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'iris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n')),(0,i.kt)("p",null,"We can easily construct pydantic models from avro schema using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/#fastkafka.encoder.avsc_to_pydantic"},(0,i.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction which is included as part of\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nitself."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.encoder import 
avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n")),(0,i.kt)("p",null,"The above code will convert avro schema to pydantic models and will\nprint pydantic models\u2019 fields. The output of the above is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n")),(0,i.kt)("p",null,"This is exactly same as manually defining the pydantic models ourselves.\nYou don\u2019t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/#fastkafka.encoder.avsc_to_pydantic"},(0,i.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction as demonstrated above."),(0,i.kt)("h4",{id:"building-pydantic-models-from-avsc-file"},"Building pydantic models from ",(0,i.kt)("inlineCode",{parentName:"h4"},".avsc")," file"),(0,i.kt)("p",null,"Not all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary ",(0,i.kt)("inlineCode",{parentName:"p"},".avsc")," files in\nfilesystem. 
Let\u2019s see how to convert those ",(0,i.kt)("inlineCode",{parentName:"p"},".avsc")," files to pydantic\nmodels."),(0,i.kt)("p",null,"Let\u2019s assume our avro files are stored in files called\n",(0,i.kt)("inlineCode",{parentName:"p"},"iris_input_data_schema.avsc")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"iris_prediction_schema.avsc"),". In that\ncase, following code converts the schema to pydantic models:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import json\nfrom fastkafka.encoder import avsc_to_pydantic\n\n\nwith open("iris_input_data_schema.avsc", "rb") as f:\n iris_input_data_schema = json.load(f)\n \nwith open("iris_prediction_schema.avsc", "rb") as f:\n iris_prediction_schema = json.load(f)\n \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n')),(0,i.kt)("h3",{id:"consumeproduce-avro-messages-with-fastkafka"},"Consume/Produce avro messages with FastKafka"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nprovides ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods to consume/produces\nmessages to/from a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," topic. 
This is explained in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#function-decorators"},"tutorial"),"."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods accepts a parameter called\n",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode avro messages."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", encoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", decoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"In the above example, in ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods, we\nexplicitly instruct FastKafka to ",(0,i.kt)("inlineCode",{parentName:"p"},"decode")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"encode")," messages using\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"avro")," ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," instead of the default ",(0,i.kt)("inlineCode",{parentName:"p"},"json"),"\n",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder"),"."),(0,i.kt)("h3",{id:"assembling-it-all-together"},"Assembling it all together"),(0,i.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use 
",(0,i.kt)("inlineCode",{parentName:"p"},"avro")," to ",(0,i.kt)("inlineCode",{parentName:"p"},"decode")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"encode")," messages:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\niris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n 
title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"The above code is a sample implementation of using FastKafka to consume\nand produce Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/#fastkafka.encoder.avsc_to_pydantic"},(0,i.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction from the FastKafka library to convert the Avro schema into\nPydantic models, which will be used to decode and encode Avro messages."),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is then instantiated with the broker details, and two functions\ndecorated with ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' are\ndefined to consume messages from the \u201cinput_data" topic and produce\nmessages to the \u201cpredictions" topic, respectively. 
The functions uses\nthe decoder=\u201cavro" and encoder=\u201cavro" parameters to decode and encode\nthe Avro messages.'),(0,i.kt)("p",null,"In summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline."),(0,i.kt)("h2",{id:"3-custom-encoder-and-decoder"},"3. Custom encoder and decoder"),(0,i.kt)("p",null,"If you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages."),(0,i.kt)("h3",{id:"writing-a-custom-encoder-and-decoder"},"Writing a custom encoder and decoder"),(0,i.kt)("p",null,"In this section, let\u2019s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n",(0,i.kt)("a",{parentName:"p",href:"https://en.wikipedia.org/wiki/ROT13"},"ROT13")," cipher."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"import codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, 'rot13')\n raw_bytes = obfuscated.encode(\"utf-8\")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n obfuscated = raw_msg.decode(\"utf-8\")\n msg_str = codecs.decode(obfuscated, 'rot13')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n")),(0,i.kt)("p",null,"The above code defines two custom functions for encoding and decoding\nmessages in a Kafka application using the FastKafka library."),(0,i.kt)("p",null,"The encoding function, ",(0,i.kt)("inlineCode",{parentName:"p"},"custom_encoder()"),", takes a message ",(0,i.kt)("inlineCode",{parentName:"p"},"msg")," which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"json()")," method, obfuscates the resulting string using the 
ROT13\nalgorithm from the ",(0,i.kt)("inlineCode",{parentName:"p"},"codecs")," module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding."),(0,i.kt)("p",null,"The decoding function, ",(0,i.kt)("inlineCode",{parentName:"p"},"custom_decoder()"),", takes a raw message ",(0,i.kt)("inlineCode",{parentName:"p"},"raw_msg"),"\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the ",(0,i.kt)("inlineCode",{parentName:"p"},"json.loads()")," method and returns a\nnew instance of the specified ",(0,i.kt)("inlineCode",{parentName:"p"},"cls")," class initialized with the decoded\ndictionary."),(0,i.kt)("p",null,"These functions can be used with FastKafka\u2019s ",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics."),(0,i.kt)("p",null,"Let\u2019s test the above code"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n")),(0,i.kt)("p",null,"This will result in following output"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},'b\'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}\'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n')),(0,i.kt)("h3",{id:"assembling-it-all-together-1"},"Assembling it all together"),(0,i.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use our custom decoder 
and\nencoder functions:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\n\nimport codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, \'rot13\')\n raw_bytes = obfuscated.encode("utf-8")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n obfuscated = raw_msg.decode("utf-8")\n msg_str = codecs.decode(obfuscated, \'rot13\')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka 
broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"This code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system."),(0,i.kt)("p",null,"The custom ",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," function takes a message represented as a\n",(0,i.kt)("inlineCode",{parentName:"p"},"BaseModel")," and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned."),(0,i.kt)("p",null,"The custom ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder")," function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the ",(0,i.kt)("inlineCode",{parentName:"p"},"json"),"\nmodule. 
This dictionary is then converted to a ",(0,i.kt)("inlineCode",{parentName:"p"},"BaseModel")," instance\nusing the cls parameter."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6780.b8374b3c.js b/assets/js/6780.b8374b3c.js new file mode 100644 index 0000000..2e9b904 --- /dev/null +++ b/assets/js/6780.b8374b3c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6780],{6356:(e,t,r)=>{function n(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t,r){var a,c=t.initialState;return{getState:function(){return c},dispatch:function(a,i){var l=function(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?n(Object(r),!0).forEach((function(t){o(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):n(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}({},c);c=e(c,{type:a,props:t,payload:i}),r({state:c,prevState:l})},pendingRequests:(a=[],{add:function(e){return a.push(e),e.finally((function(){a=a.filter((function(t){return t!==e}))}))},cancelAll:function(){a.forEach((function(e){return e.cancel()}))},isEmpty:function(){return 0===a.length}})}}function c(e){return e.reduce((function(e,t){return e.concat(t)}),[])}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function l(e){for(var t=1;t<arguments.length;t++){var 
r=null!=arguments[t]?arguments[t]:{};t%2?i(Object(r),!0).forEach((function(t){s(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):i(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function s(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function u(e){return 0===e.collections.length?0:e.collections.reduce((function(e,t){return e+t.items.length}),0)}r.r(t),r.d(t,{DocSearchModal:()=>wr});var f=0;var m=function(){};function p(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function d(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function h(e,t){var r=[];return Promise.resolve(e(t)).then((function(e){return Array.isArray(e),Promise.all(e.filter((function(e){return Boolean(e)})).map((function(e){if(e.sourceId,r.includes(e.sourceId))throw new Error("[Autocomplete] The `sourceId` ".concat(JSON.stringify(e.sourceId)," is not unique."));r.push(e.sourceId);var t=function(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?p(Object(r),!0).forEach((function(t){d(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):p(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}({getItemInputValue:function(e){return e.state.query},getItemUrl:function(){},onSelect:function(e){(0,e.setIsOpen)(!1)},onActive:m},e);return Promise.resolve(t)})))}))}function v(e){return function(e){if(Array.isArray(e))return y(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return 
Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return y(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);"Object"===r&&e.constructor&&(r=e.constructor.name);if("Map"===r||"Set"===r)return Array.from(e);if("Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r))return y(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function y(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}function g(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function b(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?g(Object(r),!0).forEach((function(t){O(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):g(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function O(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function S(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function E(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?S(Object(r),!0).forEach((function(t){j(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):S(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function j(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function 
w(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function P(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?w(Object(r),!0).forEach((function(t){I(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):w(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function I(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function D(e){return function(e){if(Array.isArray(e))return k(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return k(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);"Object"===r&&e.constructor&&(r=e.constructor.name);if("Map"===r||"Set"===r)return Array.from(e);if("Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r))return k(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function k(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}function C(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function A(e){for(var t=1;t<arguments.length;t++){var 
r=null!=arguments[t]?arguments[t]:{};t%2?C(Object(r),!0).forEach((function(t){x(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):C(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function x(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function N(e){return Boolean(e.execute)}function R(e,t){return r=e,Boolean(null==r?void 0:r.execute)?A(A({},e),{},{requests:e.queries.map((function(r){return{query:r,sourceId:t,transformResponse:e.transformResponse}}))}):{items:e,sourceId:t};var r}function q(e){var t=e.reduce((function(e,t){if(!N(t))return e.push(t),e;var r=t.searchClient,n=t.execute,o=t.requesterId,a=t.requests,c=e.find((function(e){return N(t)&&N(e)&&e.searchClient===r&&Boolean(o)&&e.requesterId===o}));if(c){var i;(i=c.items).push.apply(i,D(a))}else{var l={execute:n,requesterId:o,items:a,searchClient:r};e.push(l)}return e}),[]).map((function(e){if(!N(e))return Promise.resolve(e);var t=e,r=t.execute,n=t.items;return r({searchClient:t.searchClient,requests:n})}));return Promise.all(t).then((function(e){return c(e)}))}function T(e,t){return t.map((function(t){var r=e.filter((function(e){return e.sourceId===t.sourceId})),n=r.map((function(e){return e.items})),o=r[0].transformResponse,a=o?o(function(e){var t=e.map((function(e){var t;return P(P({},e),{},{hits:null===(t=e.hits)||void 0===t?void 0:t.map((function(t){return P(P({},t),{},{__autocomplete_indexName:e.index,__autocomplete_queryID:e.queryID})}))})}));return{results:t,hits:t.map((function(e){return e.hits})).filter(Boolean),facetHits:t.map((function(e){var t;return null===(t=e.facetHits)||void 0===t?void 0:t.map((function(e){return{label:e.value,count:e.count,_highlightResult:{label:{value:e.highlighted}}}}))})).filter(Boolean)}}(n)):n;return Array.isArray(a),a.every(Boolean),'The `getItems` function from source 
"'.concat(t.sourceId,'" must return an array of items but returned ').concat(JSON.stringify(void 0),".\n\nDid you forget to return items?\n\nSee: https://www.algolia.com/doc/ui-libraries/autocomplete/core-concepts/sources/#param-getitems"),{source:t,items:a}}))}function _(e,t){var r=t;return{then:function(t,n){return _(e.then(H(t,r,e),H(n,r,e)),r)},catch:function(t){return _(e.catch(H(t,r,e)),r)},finally:function(t){return t&&r.onCancelList.push(t),_(e.finally(H(t&&function(){return r.onCancelList=[],t()},r,e)),r)},cancel:function(){r.isCanceled=!0;var e=r.onCancelList;r.onCancelList=[],e.forEach((function(e){e()}))},isCanceled:function(){return!0===r.isCanceled}}}function L(e){return _(new Promise((function(t,r){return e(t,r)})),{isCanceled:!1,onCancelList:[]})}function M(e){return _(e,{isCanceled:!1,onCancelList:[]})}function H(e,t,r){return e?function(r){return t.isCanceled?r:e(r)}:r}function F(e){var t=function(e){var t=e.collections.map((function(e){return e.items.length})).reduce((function(e,t,r){var n=(e[r-1]||0)+t;return e.push(n),e}),[]).reduce((function(t,r){return r<=e.activeItemId?t+1:t}),0);return e.collections[t]}(e);if(!t)return null;var r=t.items[function(e){for(var t=e.state,r=e.collection,n=!1,o=0,a=0;!1===n;){var c=t.collections[o];if(c===r){n=!0;break}a+=c.items.length,o++}return t.activeItemId-a}({state:e,collection:t})],n=t.source;return{item:r,itemInputValue:n.getItemInputValue({item:r,state:e}),itemUrl:n.getItemUrl({item:r,state:e}),source:n}}L.resolve=function(e){return M(Promise.resolve(e))},L.reject=function(e){return M(Promise.reject(e))};var U=["event","nextState","props","query","refresh","store"];function B(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function V(e){for(var t=1;t<arguments.length;t++){var 
r=null!=arguments[t]?arguments[t]:{};t%2?B(Object(r),!0).forEach((function(t){K(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):B(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function K(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function $(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var J,z,W,Q=null,Z=(J=-1,z=-1,W=void 0,function(e){var t=++J;return Promise.resolve(e).then((function(e){return W&&t<z?W:(z=t,W=e,e)}))});function G(e){var t=e.event,r=e.nextState,n=void 0===r?{}:r,o=e.props,a=e.query,i=e.refresh,l=e.store,s=$(e,U);Q&&o.environment.clearTimeout(Q);var u=s.setCollections,f=s.setIsOpen,m=s.setQuery,p=s.setActiveItemId,d=s.setStatus;if(m(a),p(o.defaultActiveItemId),!a&&!1===o.openOnFocus){var h,v=l.getState().collections.map((function(e){return V(V({},e),{},{items:[]})}));d("idle"),u(v),f(null!==(h=n.isOpen)&&void 0!==h?h:o.shouldPanelOpen({state:l.getState()}));var y=M(Z(v).then((function(){return Promise.resolve()})));return l.pendingRequests.add(y)}d("loading"),Q=o.environment.setTimeout((function(){d("stalled")}),o.stallThreshold);var g=M(Z(o.getSources(V({query:a,refresh:i,state:l.getState()},s)).then((function(e){return Promise.all(e.map((function(e){return Promise.resolve(e.getItems(V({query:a,refresh:i,state:l.getState()},s))).then((function(t){return R(t,e.sourceId)}))}))).then(q).then((function(t){return T(t,e)})).then((function(e){return function(e){var t=e.collections,r=e.props,n=e.state,o=t.reduce((function(e,t){return 
E(E({},e),{},j({},t.source.sourceId,E(E({},t.source),{},{getItems:function(){return c(t.items)}})))}),{});return c(r.reshape({sources:Object.values(o),sourcesBySourceId:o,state:n})).filter(Boolean).map((function(e){return{source:e,items:e.getItems()}}))}({collections:e,props:o,state:l.getState()})}))})))).then((function(e){var r;d("idle"),u(e);var c=o.shouldPanelOpen({state:l.getState()});f(null!==(r=n.isOpen)&&void 0!==r?r:o.openOnFocus&&!a&&c||c);var m=F(l.getState());if(null!==l.getState().activeItemId&&m){var p=m.item,h=m.itemInputValue,v=m.itemUrl,y=m.source;y.onActive(V({event:t,item:p,itemInputValue:h,itemUrl:v,refresh:i,source:y,state:l.getState()},s))}})).finally((function(){d("idle"),Q&&o.environment.clearTimeout(Q)}));return l.pendingRequests.add(g)}var Y=["event","props","refresh","store"];function X(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function ee(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?X(Object(r),!0).forEach((function(t){te(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):X(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function te(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function re(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var ne=/((gt|sm)-|galaxy nexus)|samsung[- ]/i;var 
oe=["props","refresh","store"],ae=["inputElement","formElement","panelElement"],ce=["inputElement"],ie=["inputElement","maxLength"],le=["item","source"];function se(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function ue(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?se(Object(r),!0).forEach((function(t){fe(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):se(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function fe(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function me(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}function pe(e){var t=e.props,r=e.refresh,n=e.store,o=me(e,oe);return{getEnvironmentProps:function(e){var r=e.inputElement,o=e.formElement,a=e.panelElement;function c(e){!n.getState().isOpen&&n.pendingRequests.isEmpty()||e.target===r||!1===[o,a].some((function(t){return r=t,n=e.target,r===n||r.contains(n);var r,n}))&&(n.dispatch("blur",null),t.debug||n.pendingRequests.cancelAll())}return ue({onTouchStart:c,onMouseDown:c,onTouchMove:function(e){!1!==n.getState().isOpen&&r===t.environment.document.activeElement&&e.target!==r&&r.blur()}},me(e,ae))},getRootProps:function(e){return ue({role:"combobox","aria-expanded":n.getState().isOpen,"aria-haspopup":"listbox","aria-owns":n.getState().isOpen?"".concat(t.id,"-list"):void 
0,"aria-labelledby":"".concat(t.id,"-label")},e)},getFormProps:function(e){e.inputElement;return ue({action:"",noValidate:!0,role:"search",onSubmit:function(a){var c;a.preventDefault(),t.onSubmit(ue({event:a,refresh:r,state:n.getState()},o)),n.dispatch("submit",null),null===(c=e.inputElement)||void 0===c||c.blur()},onReset:function(a){var c;a.preventDefault(),t.onReset(ue({event:a,refresh:r,state:n.getState()},o)),n.dispatch("reset",null),null===(c=e.inputElement)||void 0===c||c.focus()}},me(e,ce))},getLabelProps:function(e){return ue({htmlFor:"".concat(t.id,"-input"),id:"".concat(t.id,"-label")},e)},getInputProps:function(e){var a;function c(e){(t.openOnFocus||Boolean(n.getState().query))&&G(ue({event:e,props:t,query:n.getState().completion||n.getState().query,refresh:r,store:n},o)),n.dispatch("focus",null)}var i=e||{},l=(i.inputElement,i.maxLength),s=void 0===l?512:l,u=me(i,ie),f=F(n.getState()),p=function(e){return Boolean(e&&e.match(ne))}((null===(a=t.environment.navigator)||void 0===a?void 0:a.userAgent)||""),d=null!=f&&f.itemUrl&&!p?"go":"search";return ue({"aria-autocomplete":"both","aria-activedescendant":n.getState().isOpen&&null!==n.getState().activeItemId?"".concat(t.id,"-item-").concat(n.getState().activeItemId):void 0,"aria-controls":n.getState().isOpen?"".concat(t.id,"-list"):void 0,"aria-labelledby":"".concat(t.id,"-label"),value:n.getState().completion||n.getState().query,id:"".concat(t.id,"-input"),autoComplete:"off",autoCorrect:"off",autoCapitalize:"off",enterKeyHint:d,spellCheck:"false",autoFocus:t.autoFocus,placeholder:t.placeholder,maxLength:s,type:"search",onChange:function(e){G(ue({event:e,props:t,query:e.currentTarget.value.slice(0,s),refresh:r,store:n},o))},onKeyDown:function(e){!function(e){var t=e.event,r=e.props,n=e.refresh,o=e.store,a=re(e,Y);if("ArrowUp"===t.key||"ArrowDown"===t.key){var c=function(){var 
e=r.environment.document.getElementById("".concat(r.id,"-item-").concat(o.getState().activeItemId));e&&(e.scrollIntoViewIfNeeded?e.scrollIntoViewIfNeeded(!1):e.scrollIntoView(!1))},i=function(){var e=F(o.getState());if(null!==o.getState().activeItemId&&e){var r=e.item,c=e.itemInputValue,i=e.itemUrl,l=e.source;l.onActive(ee({event:t,item:r,itemInputValue:c,itemUrl:i,refresh:n,source:l,state:o.getState()},a))}};t.preventDefault(),!1===o.getState().isOpen&&(r.openOnFocus||Boolean(o.getState().query))?G(ee({event:t,props:r,query:o.getState().query,refresh:n,store:o},a)).then((function(){o.dispatch(t.key,{nextActiveItemId:r.defaultActiveItemId}),i(),setTimeout(c,0)})):(o.dispatch(t.key,{}),i(),c())}else if("Escape"===t.key)t.preventDefault(),o.dispatch(t.key,null),o.pendingRequests.cancelAll();else if("Tab"===t.key)o.dispatch("blur",null),o.pendingRequests.cancelAll();else if("Enter"===t.key){if(null===o.getState().activeItemId||o.getState().collections.every((function(e){return 0===e.items.length})))return void(r.debug||o.pendingRequests.cancelAll());t.preventDefault();var l=F(o.getState()),s=l.item,u=l.itemInputValue,f=l.itemUrl,m=l.source;if(t.metaKey||t.ctrlKey)void 0!==f&&(m.onSelect(ee({event:t,item:s,itemInputValue:u,itemUrl:f,refresh:n,source:m,state:o.getState()},a)),r.navigator.navigateNewTab({itemUrl:f,item:s,state:o.getState()}));else if(t.shiftKey)void 0!==f&&(m.onSelect(ee({event:t,item:s,itemInputValue:u,itemUrl:f,refresh:n,source:m,state:o.getState()},a)),r.navigator.navigateNewWindow({itemUrl:f,item:s,state:o.getState()}));else if(t.altKey);else{if(void 0!==f)return m.onSelect(ee({event:t,item:s,itemInputValue:u,itemUrl:f,refresh:n,source:m,state:o.getState()},a)),void 
r.navigator.navigate({itemUrl:f,item:s,state:o.getState()});G(ee({event:t,nextState:{isOpen:!1},props:r,query:u,refresh:n,store:o},a)).then((function(){m.onSelect(ee({event:t,item:s,itemInputValue:u,itemUrl:f,refresh:n,source:m,state:o.getState()},a))}))}}}(ue({event:e,props:t,refresh:r,store:n},o))},onFocus:c,onBlur:m,onClick:function(r){e.inputElement!==t.environment.document.activeElement||n.getState().isOpen||c(r)}},u)},getPanelProps:function(e){return ue({onMouseDown:function(e){e.preventDefault()},onMouseLeave:function(){n.dispatch("mouseleave",null)}},e)},getListProps:function(e){return ue({role:"listbox","aria-labelledby":"".concat(t.id,"-label"),id:"".concat(t.id,"-list")},e)},getItemProps:function(e){var a=e.item,c=e.source,i=me(e,le);return ue({id:"".concat(t.id,"-item-").concat(a.__autocomplete_id),role:"option","aria-selected":n.getState().activeItemId===a.__autocomplete_id,onMouseMove:function(e){if(a.__autocomplete_id!==n.getState().activeItemId){n.dispatch("mousemove",a.__autocomplete_id);var t=F(n.getState());if(null!==n.getState().activeItemId&&t){var c=t.item,i=t.itemInputValue,l=t.itemUrl,s=t.source;s.onActive(ue({event:e,item:c,itemInputValue:i,itemUrl:l,refresh:r,source:s,state:n.getState()},o))}}},onMouseDown:function(e){e.preventDefault()},onClick:function(e){var i=c.getItemInputValue({item:a,state:n.getState()}),l=c.getItemUrl({item:a,state:n.getState()});(l?Promise.resolve():G(ue({event:e,nextState:{isOpen:!1},props:t,query:i,refresh:r,store:n},o))).then((function(){c.onSelect(ue({event:e,item:a,itemInputValue:i,itemUrl:l,refresh:r,source:c,state:n.getState()},o))}))}},i)}}}var de=[{segment:"autocomplete-core",version:"1.7.4"}];function he(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function ve(e){for(var t=1;t<arguments.length;t++){var 
r=null!=arguments[t]?arguments[t]:{};t%2?he(Object(r),!0).forEach((function(t){ye(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):he(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function ye(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function ge(e){var t,r,n,o,a=e.plugins,c=e.options,i=null===(t=((null===(r=c.__autocomplete_metadata)||void 0===r?void 0:r.userAgents)||[])[0])||void 0===t?void 0:t.segment,l=i?ye({},i,Object.keys((null===(n=c.__autocomplete_metadata)||void 0===n?void 0:n.options)||{})):{};return{plugins:a.map((function(e){return{name:e.name,options:Object.keys(e.__autocomplete_pluginOptions||[])}})),options:ve({"autocomplete-core":Object.keys(c)},l),ua:de.concat((null===(o=c.__autocomplete_metadata)||void 0===o?void 0:o.userAgents)||[])}}function be(e){var t,r=e.state;return!1===r.isOpen||null===r.activeItemId?null:(null===(t=F(r))||void 0===t?void 0:t.itemInputValue)||null}function Oe(e,t,r,n){if(!r)return null;if(e<0&&(null===t||null!==n&&0===t))return r+e;var o=(null===t?-1:t)+e;return o<=-1||o>=r?null===n?null:0:o}function Se(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function Ee(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?Se(Object(r),!0).forEach((function(t){je(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):Se(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function je(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}var 
we=function(e,t){switch(t.type){case"setActiveItemId":case"mousemove":return Ee(Ee({},e),{},{activeItemId:t.payload});case"setQuery":return Ee(Ee({},e),{},{query:t.payload,completion:null});case"setCollections":return Ee(Ee({},e),{},{collections:t.payload});case"setIsOpen":return Ee(Ee({},e),{},{isOpen:t.payload});case"setStatus":return Ee(Ee({},e),{},{status:t.payload});case"setContext":return Ee(Ee({},e),{},{context:Ee(Ee({},e.context),t.payload)});case"ArrowDown":var r=Ee(Ee({},e),{},{activeItemId:t.payload.hasOwnProperty("nextActiveItemId")?t.payload.nextActiveItemId:Oe(1,e.activeItemId,u(e),t.props.defaultActiveItemId)});return Ee(Ee({},r),{},{completion:be({state:r})});case"ArrowUp":var n=Ee(Ee({},e),{},{activeItemId:Oe(-1,e.activeItemId,u(e),t.props.defaultActiveItemId)});return Ee(Ee({},n),{},{completion:be({state:n})});case"Escape":return e.isOpen?Ee(Ee({},e),{},{activeItemId:null,isOpen:!1,completion:null}):Ee(Ee({},e),{},{activeItemId:null,query:"",status:"idle",collections:[]});case"submit":return Ee(Ee({},e),{},{activeItemId:null,isOpen:!1,status:"idle"});case"reset":return Ee(Ee({},e),{},{activeItemId:!0===t.props.openOnFocus?t.props.defaultActiveItemId:null,status:"idle",query:""});case"focus":return Ee(Ee({},e),{},{activeItemId:t.props.defaultActiveItemId,isOpen:(t.props.openOnFocus||Boolean(e.query))&&t.props.shouldPanelOpen({state:e})});case"blur":return t.props.debug?e:Ee(Ee({},e),{},{isOpen:!1,activeItemId:null});case"mouseleave":return Ee(Ee({},e),{},{activeItemId:t.props.defaultActiveItemId});default:return"The reducer action ".concat(JSON.stringify(t.type)," is not supported."),e}};function Pe(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function Ie(e){for(var t=1;t<arguments.length;t++){var 
r=null!=arguments[t]?arguments[t]:{};t%2?Pe(Object(r),!0).forEach((function(t){De(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):Pe(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function De(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function ke(e){var t=[],r=function(e,t){var r,n="undefined"!=typeof window?window:{},o=e.plugins||[];return b(b({debug:!1,openOnFocus:!1,placeholder:"",autoFocus:!1,defaultActiveItemId:null,stallThreshold:300,environment:n,shouldPanelOpen:function(e){return u(e.state)>0},reshape:function(e){return e.sources}},e),{},{id:null!==(r=e.id)&&void 0!==r?r:"autocomplete-".concat(f++),plugins:o,initialState:b({activeItemId:null,query:"",completion:null,collections:[],isOpen:!1,status:"idle",context:{}},e.initialState),onStateChange:function(t){var r;null===(r=e.onStateChange)||void 0===r||r.call(e,t),o.forEach((function(e){var r;return null===(r=e.onStateChange)||void 0===r?void 0:r.call(e,t)}))},onSubmit:function(t){var r;null===(r=e.onSubmit)||void 0===r||r.call(e,t),o.forEach((function(e){var r;return null===(r=e.onSubmit)||void 0===r?void 0:r.call(e,t)}))},onReset:function(t){var r;null===(r=e.onReset)||void 0===r||r.call(e,t),o.forEach((function(e){var r;return null===(r=e.onReset)||void 0===r?void 0:r.call(e,t)}))},getSources:function(r){return Promise.all([].concat(v(o.map((function(e){return e.getSources}))),[e.getSources]).filter(Boolean).map((function(e){return h(e,r)}))).then((function(e){return c(e)})).then((function(e){return e.map((function(e){return b(b({},e),{},{onSelect:function(r){e.onSelect(r),t.forEach((function(e){var t;return null===(t=e.onSelect)||void 0===t?void 0:t.call(e,r)}))},onActive:function(r){e.onActive(r),t.forEach((function(e){var t;return null===(t=e.onActive)||void 0===t?void 
0:t.call(e,r)}))}})}))}))},navigator:b({navigate:function(e){var t=e.itemUrl;n.location.assign(t)},navigateNewTab:function(e){var t=e.itemUrl,r=n.open(t,"_blank","noopener");null==r||r.focus()},navigateNewWindow:function(e){var t=e.itemUrl;n.open(t,"_blank","noopener")}},e.navigator)})}(e,t),n=a(we,r,(function(e){var t=e.prevState,n=e.state;r.onStateChange(Ie({prevState:t,state:n,refresh:s},o))})),o=function(e){var t=e.store;return{setActiveItemId:function(e){t.dispatch("setActiveItemId",e)},setQuery:function(e){t.dispatch("setQuery",e)},setCollections:function(e){var r=0,n=e.map((function(e){return l(l({},e),{},{items:c(e.items).map((function(e){return l(l({},e),{},{__autocomplete_id:r++})}))})}));t.dispatch("setCollections",n)},setIsOpen:function(e){t.dispatch("setIsOpen",e)},setStatus:function(e){t.dispatch("setStatus",e)},setContext:function(e){t.dispatch("setContext",e)}}}({store:n}),i=pe(Ie({props:r,refresh:s,store:n},o));function s(){return G(Ie({event:new Event("input"),nextState:{isOpen:n.getState().isOpen},props:r,query:n.getState().query,refresh:s,store:n},o))}return r.plugins.forEach((function(e){var r;return null===(r=e.subscribe)||void 0===r?void 0:r.call(e,Ie(Ie({},o),{},{refresh:s,onSelect:function(e){t.push({onSelect:e})},onActive:function(e){t.push({onActive:e})}}))})),function(e){var t,r,n=e.metadata,o=e.environment;if(null===(t=o.navigator)||void 0===t||null===(r=t.userAgent)||void 0===r?void 0:r.includes("Algolia Crawler")){var a=o.document.createElement("meta"),c=o.document.querySelector("head");a.name="algolia:metadata",setTimeout((function(){a.content=JSON.stringify(n),c.appendChild(a)}),0)}}({metadata:ge({plugins:r.plugins,options:e}),environment:r.environment}),Ie(Ie({refresh:s},i),o)}var Ce=r(7294),Ae=64;function xe(e){var t=e.translations,r=(void 0===t?{}:t).searchByText,n=void 0===r?"Search by":r;return 
Ce.createElement("a",{href:"https://www.algolia.com/ref/docsearch/?utm_source=".concat(window.location.hostname,"&utm_medium=referral&utm_content=powered_by&utm_campaign=docsearch"),target:"_blank",rel:"noopener noreferrer"},Ce.createElement("span",{className:"DocSearch-Label"},n),Ce.createElement("svg",{width:"77",height:"19","aria-label":"Algolia",role:"img",id:"Layer_1",xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 2196.2 500"},Ce.createElement("defs",null,Ce.createElement("style",null,".cls-1,.cls-2{fill:#003dff;}.cls-2{fill-rule:evenodd;}")),Ce.createElement("path",{className:"cls-2",d:"M1070.38,275.3V5.91c0-3.63-3.24-6.39-6.82-5.83l-50.46,7.94c-2.87,.45-4.99,2.93-4.99,5.84l.17,273.22c0,12.92,0,92.7,95.97,95.49,3.33,.1,6.09-2.58,6.09-5.91v-40.78c0-2.96-2.19-5.51-5.12-5.84-34.85-4.01-34.85-47.57-34.85-54.72Z"}),Ce.createElement("rect",{className:"cls-1",x:"1845.88",y:"104.73",width:"62.58",height:"277.9",rx:"5.9",ry:"5.9"}),Ce.createElement("path",{className:"cls-2",d:"M1851.78,71.38h50.77c3.26,0,5.9-2.64,5.9-5.9V5.9c0-3.62-3.24-6.39-6.82-5.83l-50.77,7.95c-2.87,.45-4.99,2.92-4.99,5.83v51.62c0,3.26,2.64,5.9,5.9,5.9Z"}),Ce.createElement("path",{className:"cls-2",d:"M1764.03,275.3V5.91c0-3.63-3.24-6.39-6.82-5.83l-50.46,7.94c-2.87,.45-4.99,2.93-4.99,5.84l.17,273.22c0,12.92,0,92.7,95.97,95.49,3.33,.1,6.09-2.58,6.09-5.91v-40.78c0-2.96-2.19-5.51-5.12-5.84-34.85-4.01-34.85-47.57-34.85-54.72Z"}),Ce.createElement("path",{className:"cls-2",d:"M1631.95,142.72c-11.14-12.25-24.83-21.65-40.78-28.31-15.92-6.53-33.26-9.85-52.07-9.85-18.78,0-36.15,3.17-51.92,9.85-15.59,6.66-29.29,16.05-40.76,28.31-11.47,12.23-20.38,26.87-26.76,44.03-6.38,17.17-9.24,37.37-9.24,58.36,0,20.99,3.19,36.87,9.55,54.21,6.38,17.32,15.14,32.11,26.45,44.36,11.29,12.23,24.83,21.62,40.6,28.46,15.77,6.83,40.12,10.33,52.4,10.48,12.25,0,36.78-3.82,52.7-10.48,15.92-6.68,29.46-16.23,40.78-28.46,11.29-12.25,20.05-27.04,26.25-44.36,6.22-17.34,9.24-33.22,9.24-54.21,0-20.99-3.34-41.19-10.03-58.36-6.38-17.17-15.14-31
.8-26.43-44.03Zm-44.43,163.75c-11.47,15.75-27.56,23.7-48.09,23.7-20.55,0-36.63-7.8-48.1-23.7-11.47-15.75-17.21-34.01-17.21-61.2,0-26.89,5.59-49.14,17.06-64.87,11.45-15.75,27.54-23.52,48.07-23.52,20.55,0,36.63,7.78,48.09,23.52,11.47,15.57,17.36,37.98,17.36,64.87,0,27.19-5.72,45.3-17.19,61.2Z"}),Ce.createElement("path",{className:"cls-2",d:"M894.42,104.73h-49.33c-48.36,0-90.91,25.48-115.75,64.1-14.52,22.58-22.99,49.63-22.99,78.73,0,44.89,20.13,84.92,51.59,111.1,2.93,2.6,6.05,4.98,9.31,7.14,12.86,8.49,28.11,13.47,44.52,13.47,1.23,0,2.46-.03,3.68-.09,.36-.02,.71-.05,1.07-.07,.87-.05,1.75-.11,2.62-.2,.34-.03,.68-.08,1.02-.12,.91-.1,1.82-.21,2.73-.34,.21-.03,.42-.07,.63-.1,32.89-5.07,61.56-30.82,70.9-62.81v57.83c0,3.26,2.64,5.9,5.9,5.9h50.42c3.26,0,5.9-2.64,5.9-5.9V110.63c0-3.26-2.64-5.9-5.9-5.9h-56.32Zm0,206.92c-12.2,10.16-27.97,13.98-44.84,15.12-.16,.01-.33,.03-.49,.04-1.12,.07-2.24,.1-3.36,.1-42.24,0-77.12-35.89-77.12-79.37,0-10.25,1.96-20.01,5.42-28.98,11.22-29.12,38.77-49.74,71.06-49.74h49.33v142.83Z"}),Ce.createElement("path",{className:"cls-2",d:"M2133.97,104.73h-49.33c-48.36,0-90.91,25.48-115.75,64.1-14.52,22.58-22.99,49.63-22.99,78.73,0,44.89,20.13,84.92,51.59,111.1,2.93,2.6,6.05,4.98,9.31,7.14,12.86,8.49,28.11,13.47,44.52,13.47,1.23,0,2.46-.03,3.68-.09,.36-.02,.71-.05,1.07-.07,.87-.05,1.75-.11,2.62-.2,.34-.03,.68-.08,1.02-.12,.91-.1,1.82-.21,2.73-.34,.21-.03,.42-.07,.63-.1,32.89-5.07,61.56-30.82,70.9-62.81v57.83c0,3.26,2.64,5.9,5.9,5.9h50.42c3.26,0,5.9-2.64,5.9-5.9V110.63c0-3.26-2.64-5.9-5.9-5.9h-56.32Zm0,206.92c-12.2,10.16-27.97,13.98-44.84,15.12-.16,.01-.33,.03-.49,.04-1.12,.07-2.24,.1-3.36,.1-42.24,0-77.12-35.89-77.12-79.37,0-10.25,1.96-20.01,5.42-28.98,11.22-29.12,38.77-49.74,71.06-49.74h49.33v142.83Z"}),Ce.createElement("path",{className:"cls-2",d:"M1314.05,104.73h-49.33c-48.36,0-90.91,25.48-115.75,64.1-11.79,18.34-19.6,39.64-22.11,62.59-.58,5.3-.88,10.68-.88,16.14s.31,11.15,.93,16.59c4.28,38.09,23.14,71.61,50.66,94.52,2.93,2.6,6.05,4.98,9.31,7.14,12.86,8.4
9,28.11,13.47,44.52,13.47h0c17.99,0,34.61-5.93,48.16-15.97,16.29-11.58,28.88-28.54,34.48-47.75v50.26h-.11v11.08c0,21.84-5.71,38.27-17.34,49.36-11.61,11.08-31.04,16.63-58.25,16.63-11.12,0-28.79-.59-46.6-2.41-2.83-.29-5.46,1.5-6.27,4.22l-12.78,43.11c-1.02,3.46,1.27,7.02,4.83,7.53,21.52,3.08,42.52,4.68,54.65,4.68,48.91,0,85.16-10.75,108.89-32.21,21.48-19.41,33.15-48.89,35.2-88.52V110.63c0-3.26-2.64-5.9-5.9-5.9h-56.32Zm0,64.1s.65,139.13,0,143.36c-12.08,9.77-27.11,13.59-43.49,14.7-.16,.01-.33,.03-.49,.04-1.12,.07-2.24,.1-3.36,.1-1.32,0-2.63-.03-3.94-.1-40.41-2.11-74.52-37.26-74.52-79.38,0-10.25,1.96-20.01,5.42-28.98,11.22-29.12,38.77-49.74,71.06-49.74h49.33Z"}),Ce.createElement("path",{className:"cls-1",d:"M249.83,0C113.3,0,2,110.09,.03,246.16c-2,138.19,110.12,252.7,248.33,253.5,42.68,.25,83.79-10.19,120.3-30.03,3.56-1.93,4.11-6.83,1.08-9.51l-23.38-20.72c-4.75-4.21-11.51-5.4-17.36-2.92-25.48,10.84-53.17,16.38-81.71,16.03-111.68-1.37-201.91-94.29-200.13-205.96,1.76-110.26,92-199.41,202.67-199.41h202.69V407.41l-115-102.18c-3.72-3.31-9.42-2.66-12.42,1.31-18.46,24.44-48.53,39.64-81.93,37.34-46.33-3.2-83.87-40.5-87.34-86.81-4.15-55.24,39.63-101.52,94-101.52,49.18,0,89.68,37.85,93.91,85.95,.38,4.28,2.31,8.27,5.52,11.12l29.95,26.55c3.4,3.01,8.79,1.17,9.63-3.3,2.16-11.55,2.92-23.58,2.07-35.92-4.82-70.34-61.8-126.93-132.17-131.26-80.68-4.97-148.13,58.14-150.27,137.25-2.09,77.1,61.08,143.56,138.19,145.26,32.19,.71,62.03-9.41,86.14-26.95l150.26,133.2c6.44,5.71,16.61,1.14,16.61-7.47V9.48C499.66,4.25,495.42,0,490.18,0H249.83Z"})))}function Ne(e){return Ce.createElement("svg",{width:"15",height:"15","aria-label":e.ariaLabel,role:"img"},Ce.createElement("g",{fill:"none",stroke:"currentColor",strokeLinecap:"round",strokeLinejoin:"round",strokeWidth:"1.2"},e.children))}function Re(e){var t=e.translations,r=void 0===t?{}:t,n=r.selectText,o=void 0===n?"to select":n,a=r.selectKeyAriaLabel,c=void 0===a?"Enter key":a,i=r.navigateText,l=void 0===i?"to 
navigate":i,s=r.navigateUpKeyAriaLabel,u=void 0===s?"Arrow up":s,f=r.navigateDownKeyAriaLabel,m=void 0===f?"Arrow down":f,p=r.closeText,d=void 0===p?"to close":p,h=r.closeKeyAriaLabel,v=void 0===h?"Escape key":h,y=r.searchByText,g=void 0===y?"Search by":y;return Ce.createElement(Ce.Fragment,null,Ce.createElement("div",{className:"DocSearch-Logo"},Ce.createElement(xe,{translations:{searchByText:g}})),Ce.createElement("ul",{className:"DocSearch-Commands"},Ce.createElement("li",null,Ce.createElement("kbd",{className:"DocSearch-Commands-Key"},Ce.createElement(Ne,{ariaLabel:c},Ce.createElement("path",{d:"M12 3.53088v3c0 1-1 2-2 2H4M7 11.53088l-3-3 3-3"}))),Ce.createElement("span",{className:"DocSearch-Label"},o)),Ce.createElement("li",null,Ce.createElement("kbd",{className:"DocSearch-Commands-Key"},Ce.createElement(Ne,{ariaLabel:m},Ce.createElement("path",{d:"M7.5 3.5v8M10.5 8.5l-3 3-3-3"}))),Ce.createElement("kbd",{className:"DocSearch-Commands-Key"},Ce.createElement(Ne,{ariaLabel:u},Ce.createElement("path",{d:"M7.5 11.5v-8M10.5 6.5l-3-3-3 3"}))),Ce.createElement("span",{className:"DocSearch-Label"},l)),Ce.createElement("li",null,Ce.createElement("kbd",{className:"DocSearch-Commands-Key"},Ce.createElement(Ne,{ariaLabel:v},Ce.createElement("path",{d:"M13.6167 8.936c-.1065.3583-.6883.962-1.4875.962-.7993 0-1.653-.9165-1.653-2.1258v-.5678c0-1.2548.7896-2.1016 1.653-2.1016.8634 0 1.3601.4778 1.4875 1.0724M9 6c-.1352-.4735-.7506-.9219-1.46-.8972-.7092.0246-1.344.57-1.344 1.2166s.4198.8812 1.3445.9805C8.465 7.3992 8.968 7.9337 9 8.5c.032.5663-.454 1.398-1.4595 1.398C6.6593 9.898 6 9 5.963 8.4851m-1.4748.5368c-.2635.5941-.8099.876-1.5443.876s-1.7073-.6248-1.7073-2.204v-.4603c0-1.0416.721-2.131 1.7073-2.131.9864 0 1.6425 1.031 1.5443 2.2492h-2.956"}))),Ce.createElement("span",{className:"DocSearch-Label"},d))))}function qe(e){var t=e.hit,r=e.children;return Ce.createElement("a",{href:t.url},r)}function Te(){return Ce.createElement("svg",{width:"40",height:"40",viewBox:"0 0 20 
20",fill:"none",fillRule:"evenodd",stroke:"currentColor",strokeLinecap:"round",strokeLinejoin:"round"},Ce.createElement("path",{d:"M19 4.8a16 16 0 00-2-1.2m-3.3-1.2A16 16 0 001.1 4.7M16.7 8a12 12 0 00-2.8-1.4M10 6a12 12 0 00-6.7 2M12.3 14.7a4 4 0 00-4.5 0M14.5 11.4A8 8 0 0010 10M3 16L18 2M10 18h0"}))}function _e(e){var t=e.translations,r=void 0===t?{}:t,n=r.titleText,o=void 0===n?"Unable to fetch results":n,a=r.helpText,c=void 0===a?"You might want to check your network connection.":a;return Ce.createElement("div",{className:"DocSearch-ErrorScreen"},Ce.createElement("div",{className:"DocSearch-Screen-Icon"},Ce.createElement(Te,null)),Ce.createElement("p",{className:"DocSearch-Title"},o),Ce.createElement("p",{className:"DocSearch-Help"},c))}function Le(){return Ce.createElement("svg",{width:"40",height:"40",viewBox:"0 0 20 20",fill:"none",fillRule:"evenodd",stroke:"currentColor",strokeLinecap:"round",strokeLinejoin:"round"},Ce.createElement("path",{d:"M15.5 4.8c2 3 1.7 7-1 9.7h0l4.3 4.3-4.3-4.3a7.8 7.8 0 01-9.8 1m-2.2-2.2A7.8 7.8 0 0113.2 2.4M2 18L18 2"}))}var Me=["translations"];function He(e){return function(e){if(Array.isArray(e))return Fe(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return Fe(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);"Object"===r&&e.constructor&&(r=e.constructor.name);if("Map"===r||"Set"===r)return Array.from(e);if("Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r))return Fe(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function Fe(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}function Ue(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var 
r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}function Be(e){var t=e.translations,r=void 0===t?{}:t,n=Ue(e,Me),o=r.noResultsText,a=void 0===o?"No results for":o,c=r.suggestedQueryText,i=void 0===c?"Try searching for":c,l=r.reportMissingResultsText,s=void 0===l?"Believe this query should return results?":l,u=r.reportMissingResultsLinkText,f=void 0===u?"Let us know.":u,m=n.state.context.searchSuggestions;return Ce.createElement("div",{className:"DocSearch-NoResults"},Ce.createElement("div",{className:"DocSearch-Screen-Icon"},Ce.createElement(Le,null)),Ce.createElement("p",{className:"DocSearch-Title"},a,' "',Ce.createElement("strong",null,n.state.query),'"'),m&&m.length>0&&Ce.createElement("div",{className:"DocSearch-NoResults-Prefill-List"},Ce.createElement("p",{className:"DocSearch-Help"},i,":"),Ce.createElement("ul",null,m.slice(0,3).reduce((function(e,t){return[].concat(He(e),[Ce.createElement("li",{key:t},Ce.createElement("button",{className:"DocSearch-Prefill",key:t,type:"button",onClick:function(){n.setQuery(t.toLowerCase()+" "),n.refresh(),n.inputRef.current.focus()}},t))])}),[]))),n.getMissingResultsUrl&&Ce.createElement("p",{className:"DocSearch-Help"},"".concat(s," "),Ce.createElement("a",{href:n.getMissingResultsUrl({query:n.state.query}),target:"_blank",rel:"noopener noreferrer"},f)))}var Ve=function(){return Ce.createElement("svg",{width:"20",height:"20",viewBox:"0 0 20 20"},Ce.createElement("path",{d:"M17 6v12c0 .52-.2 1-1 1H4c-.7 0-1-.33-1-1V2c0-.55.42-1 1-1h8l5 5zM14 8h-3.13c-.51 0-.87-.34-.87-.87V4",stroke:"currentColor",fill:"none",fillRule:"evenodd",strokeLinejoin:"round"}))};function Ke(e){switch(e.type){case"lvl1":return Ce.createElement(Ve,null);case"content":return Ce.createElement(Je,null);default:return 
Ce.createElement($e,null)}}function $e(){return Ce.createElement("svg",{width:"20",height:"20",viewBox:"0 0 20 20"},Ce.createElement("path",{d:"M13 13h4-4V8H7v5h6v4-4H7V8H3h4V3v5h6V3v5h4-4v5zm-6 0v4-4H3h4z",stroke:"currentColor",fill:"none",fillRule:"evenodd",strokeLinecap:"round",strokeLinejoin:"round"}))}function Je(){return Ce.createElement("svg",{width:"20",height:"20",viewBox:"0 0 20 20"},Ce.createElement("path",{d:"M17 5H3h14zm0 5H3h14zm0 5H3h14z",stroke:"currentColor",fill:"none",fillRule:"evenodd",strokeLinejoin:"round"}))}function ze(){return Ce.createElement("svg",{className:"DocSearch-Hit-Select-Icon",width:"20",height:"20",viewBox:"0 0 20 20"},Ce.createElement("g",{stroke:"currentColor",fill:"none",fillRule:"evenodd",strokeLinecap:"round",strokeLinejoin:"round"},Ce.createElement("path",{d:"M18 3v4c0 2-2 4-4 4H2"}),Ce.createElement("path",{d:"M8 17l-6-6 6-6"})))}var We=["hit","attribute","tagName"];function Qe(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function Ze(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?Qe(Object(r),!0).forEach((function(t){Ge(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):Qe(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function Ge(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function Ye(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return 
o}function Xe(e,t){return t.split(".").reduce((function(e,t){return null!=e&&e[t]?e[t]:null}),e)}function et(e){var t=e.hit,r=e.attribute,n=e.tagName,o=void 0===n?"span":n,a=Ye(e,We);return(0,Ce.createElement)(o,Ze(Ze({},a),{},{dangerouslySetInnerHTML:{__html:Xe(t,"_snippetResult.".concat(r,".value"))||Xe(t,r)}}))}function tt(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var r=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null==r)return;var n,o,a=[],c=!0,i=!1;try{for(r=r.call(e);!(c=(n=r.next()).done)&&(a.push(n.value),!t||a.length!==t);c=!0);}catch(l){i=!0,o=l}finally{try{c||null==r.return||r.return()}finally{if(i)throw o}}return a}(e,t)||function(e,t){if(!e)return;if("string"==typeof e)return rt(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);"Object"===r&&e.constructor&&(r=e.constructor.name);if("Map"===r||"Set"===r)return Array.from(e);if("Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r))return rt(e,t)}(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function rt(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}function nt(){return nt=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var r=arguments[t];for(var n in r)Object.prototype.hasOwnProperty.call(r,n)&&(e[n]=r[n])}return e},nt.apply(this,arguments)}function ot(e){return e.collection&&0!==e.collection.items.length?Ce.createElement("section",{className:"DocSearch-Hits"},Ce.createElement("div",{className:"DocSearch-Hit-source"},e.title),Ce.createElement("ul",e.getListProps(),e.collection.items.map((function(t,r){return Ce.createElement(at,nt({key:[e.title,t.objectID].join(":"),item:t,index:r},e))})))):null}function at(e){var 
t=e.item,r=e.index,n=e.renderIcon,o=e.renderAction,a=e.getItemProps,c=e.onItemClick,i=e.collection,l=e.hitComponent,s=tt(Ce.useState(!1),2),u=s[0],f=s[1],m=tt(Ce.useState(!1),2),p=m[0],d=m[1],h=Ce.useRef(null),v=l;return Ce.createElement("li",nt({className:["DocSearch-Hit",t.__docsearch_parent&&"DocSearch-Hit--Child",u&&"DocSearch-Hit--deleting",p&&"DocSearch-Hit--favoriting"].filter(Boolean).join(" "),onTransitionEnd:function(){h.current&&h.current()}},a({item:t,source:i.source,onClick:function(){c(t)}})),Ce.createElement(v,{hit:t},Ce.createElement("div",{className:"DocSearch-Hit-Container"},n({item:t,index:r}),t.hierarchy[t.type]&&"lvl1"===t.type&&Ce.createElement("div",{className:"DocSearch-Hit-content-wrapper"},Ce.createElement(et,{className:"DocSearch-Hit-title",hit:t,attribute:"hierarchy.lvl1"}),t.content&&Ce.createElement(et,{className:"DocSearch-Hit-path",hit:t,attribute:"content"})),t.hierarchy[t.type]&&("lvl2"===t.type||"lvl3"===t.type||"lvl4"===t.type||"lvl5"===t.type||"lvl6"===t.type)&&Ce.createElement("div",{className:"DocSearch-Hit-content-wrapper"},Ce.createElement(et,{className:"DocSearch-Hit-title",hit:t,attribute:"hierarchy.".concat(t.type)}),Ce.createElement(et,{className:"DocSearch-Hit-path",hit:t,attribute:"hierarchy.lvl1"})),"content"===t.type&&Ce.createElement("div",{className:"DocSearch-Hit-content-wrapper"},Ce.createElement(et,{className:"DocSearch-Hit-title",hit:t,attribute:"content"}),Ce.createElement(et,{className:"DocSearch-Hit-path",hit:t,attribute:"hierarchy.lvl1"})),o({item:t,runDeleteTransition:function(e){f(!0),h.current=e},runFavoriteTransition:function(e){d(!0),h.current=e}}))))}var ct=/(<mark>|<\/mark>)/g,it=RegExp(ct.source);function lt(e){var t,r,n,o,a,c=e;if(!c.__docsearch_parent&&!e._highlightResult)return e.hierarchy.lvl0;var i=((c.__docsearch_parent?null===(t=c.__docsearch_parent)||void 0===t||null===(r=t._highlightResult)||void 0===r||null===(n=r.hierarchy)||void 0===n?void 0:n.lvl0:null===(o=e._highlightResult)||void 
0===o||null===(a=o.hierarchy)||void 0===a?void 0:a.lvl0)||{}).value;return i&&it.test(i)?i.replace(ct,""):i}function st(){return st=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var r=arguments[t];for(var n in r)Object.prototype.hasOwnProperty.call(r,n)&&(e[n]=r[n])}return e},st.apply(this,arguments)}function ut(e){return Ce.createElement("div",{className:"DocSearch-Dropdown-Container"},e.state.collections.map((function(t){if(0===t.items.length)return null;var r=lt(t.items[0]);return Ce.createElement(ot,st({},e,{key:t.source.sourceId,title:r,collection:t,renderIcon:function(e){var r,n=e.item,o=e.index;return Ce.createElement(Ce.Fragment,null,n.__docsearch_parent&&Ce.createElement("svg",{className:"DocSearch-Hit-Tree",viewBox:"0 0 24 54"},Ce.createElement("g",{stroke:"currentColor",fill:"none",fillRule:"evenodd",strokeLinecap:"round",strokeLinejoin:"round"},n.__docsearch_parent!==(null===(r=t.items[o+1])||void 0===r?void 0:r.__docsearch_parent)?Ce.createElement("path",{d:"M8 6v21M20 27H8.3"}):Ce.createElement("path",{d:"M8 6v42M20 27H8.3"}))),Ce.createElement("div",{className:"DocSearch-Hit-icon"},Ce.createElement(Ke,{type:n.type})))},renderAction:function(){return Ce.createElement("div",{className:"DocSearch-Hit-action"},Ce.createElement(ze,null))}}))})),e.resultsFooterComponent&&Ce.createElement("section",{className:"DocSearch-HitsFooter"},Ce.createElement(e.resultsFooterComponent,{state:e.state})))}function ft(){return Ce.createElement("svg",{width:"20",height:"20",viewBox:"0 0 20 20"},Ce.createElement("g",{stroke:"currentColor",fill:"none",fillRule:"evenodd",strokeLinecap:"round",strokeLinejoin:"round"},Ce.createElement("path",{d:"M3.18 6.6a8.23 8.23 0 1112.93 9.94h0a8.23 8.23 0 01-11.63 0"}),Ce.createElement("path",{d:"M6.44 7.25H2.55V3.36M10.45 6v5.6M10.45 11.6L13 13"})))}function mt(){return Ce.createElement("svg",{width:"20",height:"20",viewBox:"0 0 20 20"},Ce.createElement("path",{d:"M10 14.2L5 17l1-5.6-4-4 5.5-.7 2.5-5 2.5 5 5.6.8-4 4 .9 
5.5z",stroke:"currentColor",fill:"none",fillRule:"evenodd",strokeLinejoin:"round"}))}function pt(){return Ce.createElement("svg",{width:"20",height:"20",viewBox:"0 0 20 20"},Ce.createElement("path",{d:"M10 10l5.09-5.09L10 10l5.09 5.09L10 10zm0 0L4.91 4.91 10 10l-5.09 5.09L10 10z",stroke:"currentColor",fill:"none",fillRule:"evenodd",strokeLinecap:"round",strokeLinejoin:"round"}))}var dt=["translations"];function ht(){return ht=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var r=arguments[t];for(var n in r)Object.prototype.hasOwnProperty.call(r,n)&&(e[n]=r[n])}return e},ht.apply(this,arguments)}function vt(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}function yt(e){var t=e.translations,r=void 0===t?{}:t,n=vt(e,dt),o=r.recentSearchesTitle,a=void 0===o?"Recent":o,c=r.noRecentSearchesText,i=void 0===c?"No recent searches":c,l=r.saveRecentSearchButtonTitle,s=void 0===l?"Save this search":l,u=r.removeRecentSearchButtonTitle,f=void 0===u?"Remove this search from history":u,m=r.favoriteSearchesTitle,p=void 0===m?"Favorite":m,d=r.removeFavoriteSearchButtonTitle,h=void 0===d?"Remove this search from favorites":d;return"idle"===n.state.status&&!1===n.hasCollections?n.disableUserPersonalization?null:Ce.createElement("div",{className:"DocSearch-StartScreen"},Ce.createElement("p",{className:"DocSearch-Help"},i)):!1===n.hasCollections?null:Ce.createElement("div",{className:"DocSearch-Dropdown-Container"},Ce.createElement(ot,ht({},n,{title:a,collection:n.state.collections[0],renderIcon:function(){return Ce.createElement("div",{className:"DocSearch-Hit-icon"},Ce.createElement(ft,null))},renderAction:function(e){var 
t=e.item,r=e.runFavoriteTransition,o=e.runDeleteTransition;return Ce.createElement(Ce.Fragment,null,Ce.createElement("div",{className:"DocSearch-Hit-action"},Ce.createElement("button",{className:"DocSearch-Hit-action-button",title:s,type:"submit",onClick:function(e){e.preventDefault(),e.stopPropagation(),r((function(){n.favoriteSearches.add(t),n.recentSearches.remove(t),n.refresh()}))}},Ce.createElement(mt,null))),Ce.createElement("div",{className:"DocSearch-Hit-action"},Ce.createElement("button",{className:"DocSearch-Hit-action-button",title:f,type:"submit",onClick:function(e){e.preventDefault(),e.stopPropagation(),o((function(){n.recentSearches.remove(t),n.refresh()}))}},Ce.createElement(pt,null))))}})),Ce.createElement(ot,ht({},n,{title:p,collection:n.state.collections[1],renderIcon:function(){return Ce.createElement("div",{className:"DocSearch-Hit-icon"},Ce.createElement(mt,null))},renderAction:function(e){var t=e.item,r=e.runDeleteTransition;return Ce.createElement("div",{className:"DocSearch-Hit-action"},Ce.createElement("button",{className:"DocSearch-Hit-action-button",title:h,type:"submit",onClick:function(e){e.preventDefault(),e.stopPropagation(),r((function(){n.favoriteSearches.remove(t),n.refresh()}))}},Ce.createElement(pt,null)))}})))}var gt=["translations"];function bt(){return bt=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var r=arguments[t];for(var n in r)Object.prototype.hasOwnProperty.call(r,n)&&(e[n]=r[n])}return e},bt.apply(this,arguments)}function Ot(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var St=Ce.memo((function(e){var t=e.translations,r=void 0===t?{}:t,n=Ot(e,gt);if("error"===n.state.status)return 
Ce.createElement(_e,{translations:null==r?void 0:r.errorScreen});var o=n.state.collections.some((function(e){return e.items.length>0}));return n.state.query?!1===o?Ce.createElement(Be,bt({},n,{translations:null==r?void 0:r.noResultsScreen})):Ce.createElement(ut,n):Ce.createElement(yt,bt({},n,{hasCollections:o,translations:null==r?void 0:r.startScreen}))}),(function(e,t){return"loading"===t.state.status||"stalled"===t.state.status}));function Et(){return Ce.createElement("svg",{viewBox:"0 0 38 38",stroke:"currentColor",strokeOpacity:".5"},Ce.createElement("g",{fill:"none",fillRule:"evenodd"},Ce.createElement("g",{transform:"translate(1 1)",strokeWidth:"2"},Ce.createElement("circle",{strokeOpacity:".3",cx:"18",cy:"18",r:"18"}),Ce.createElement("path",{d:"M36 18c0-9.94-8.06-18-18-18"},Ce.createElement("animateTransform",{attributeName:"transform",type:"rotate",from:"0 18 18",to:"360 18 18",dur:"1s",repeatCount:"indefinite"})))))}var jt=r(830),wt=["translations"];function Pt(){return Pt=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var r=arguments[t];for(var n in r)Object.prototype.hasOwnProperty.call(r,n)&&(e[n]=r[n])}return e},Pt.apply(this,arguments)}function It(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}function Dt(e){var t=e.translations,r=void 0===t?{}:t,n=It(e,wt),o=r.resetButtonTitle,a=void 0===o?"Clear the query":o,c=r.resetButtonAriaLabel,i=void 0===c?"Clear the query":c,l=r.cancelButtonText,s=void 0===l?"Cancel":l,u=r.cancelButtonAriaLabel,f=void 0===u?"Cancel":u,m=n.getFormProps({inputElement:n.inputRef.current}).onReset;return 
Ce.useEffect((function(){n.autoFocus&&n.inputRef.current&&n.inputRef.current.focus()}),[n.autoFocus,n.inputRef]),Ce.useEffect((function(){n.isFromSelection&&n.inputRef.current&&n.inputRef.current.select()}),[n.isFromSelection,n.inputRef]),Ce.createElement(Ce.Fragment,null,Ce.createElement("form",{className:"DocSearch-Form",onSubmit:function(e){e.preventDefault()},onReset:m},Ce.createElement("label",Pt({className:"DocSearch-MagnifierLabel"},n.getLabelProps()),Ce.createElement(jt.W,null)),Ce.createElement("div",{className:"DocSearch-LoadingIndicator"},Ce.createElement(Et,null)),Ce.createElement("input",Pt({className:"DocSearch-Input",ref:n.inputRef},n.getInputProps({inputElement:n.inputRef.current,autoFocus:n.autoFocus,maxLength:Ae}))),Ce.createElement("button",{type:"reset",title:a,className:"DocSearch-Reset","aria-label":i,hidden:!n.state.query},Ce.createElement(pt,null))),Ce.createElement("button",{className:"DocSearch-Cancel",type:"reset","aria-label":f,onClick:n.onClose},s))}var kt=["_highlightResult","_snippetResult"];function Ct(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}function At(e){return!1===function(){var e="__TEST_KEY__";try{return localStorage.setItem(e,""),localStorage.removeItem(e),!0}catch(t){return!1}}()?{setItem:function(){},getItem:function(){return[]}}:{setItem:function(t){return window.localStorage.setItem(e,JSON.stringify(t))},getItem:function(){var t=window.localStorage.getItem(e);return t?JSON.parse(t):[]}}}function xt(e){var t=e.key,r=e.limit,n=void 0===r?5:r,o=At(t),a=o.getItem().slice(0,n);return{add:function(e){var t=e,r=(t._highlightResult,t._snippetResult,Ct(t,kt)),c=a.findIndex((function(e){return 
e.objectID===r.objectID}));c>-1&&a.splice(c,1),a.unshift(r),a=a.slice(0,n),o.setItem(a)},remove:function(e){a=a.filter((function(t){return t.objectID!==e.objectID})),o.setItem(a)},getAll:function(){return a}}}function Nt(e){const t=`algoliasearch-client-js-${e.key}`;let r;const n=()=>(void 0===r&&(r=e.localStorage||window.localStorage),r),o=()=>JSON.parse(n().getItem(t)||"{}");return{get:(e,t,r={miss:()=>Promise.resolve()})=>Promise.resolve().then((()=>{const r=JSON.stringify(e),n=o()[r];return Promise.all([n||t(),void 0!==n])})).then((([e,t])=>Promise.all([e,t||r.miss(e)]))).then((([e])=>e)),set:(e,r)=>Promise.resolve().then((()=>{const a=o();return a[JSON.stringify(e)]=r,n().setItem(t,JSON.stringify(a)),r})),delete:e=>Promise.resolve().then((()=>{const r=o();delete r[JSON.stringify(e)],n().setItem(t,JSON.stringify(r))})),clear:()=>Promise.resolve().then((()=>{n().removeItem(t)}))}}function Rt(e){const t=[...e.caches],r=t.shift();return void 0===r?{get:(e,t,r={miss:()=>Promise.resolve()})=>t().then((e=>Promise.all([e,r.miss(e)]))).then((([e])=>e)),set:(e,t)=>Promise.resolve(t),delete:e=>Promise.resolve(),clear:()=>Promise.resolve()}:{get:(e,n,o={miss:()=>Promise.resolve()})=>r.get(e,n,o).catch((()=>Rt({caches:t}).get(e,n,o))),set:(e,n)=>r.set(e,n).catch((()=>Rt({caches:t}).set(e,n))),delete:e=>r.delete(e).catch((()=>Rt({caches:t}).delete(e))),clear:()=>r.clear().catch((()=>Rt({caches:t}).clear()))}}function qt(e={serializable:!0}){let t={};return{get(r,n,o={miss:()=>Promise.resolve()}){const a=JSON.stringify(r);if(a in t)return Promise.resolve(e.serializable?JSON.parse(t[a]):t[a]);const c=n(),i=o&&o.miss||(()=>Promise.resolve());return c.then((e=>i(e))).then((()=>c))},set:(r,n)=>(t[JSON.stringify(r)]=e.serializable?JSON.stringify(n):n,Promise.resolve(n)),delete:e=>(delete t[JSON.stringify(e)],Promise.resolve()),clear:()=>(t={},Promise.resolve())}}function Tt(e){let t=e.length-1;for(;t>0;t--){const r=Math.floor(Math.random()*(t+1)),n=e[t];e[t]=e[r],e[r]=n}return 
e}function _t(e,t){return t?(Object.keys(t).forEach((r=>{e[r]=t[r](e)})),e):e}function Lt(e,...t){let r=0;return e.replace(/%s/g,(()=>encodeURIComponent(t[r++])))}const Mt="4.16.0",Ht={WithinQueryParameters:0,WithinHeaders:1};function Ft(e,t){const r=e||{},n=r.data||{};return Object.keys(r).forEach((e=>{-1===["timeout","headers","queryParameters","data","cacheable"].indexOf(e)&&(n[e]=r[e])})),{data:Object.entries(n).length>0?n:void 0,timeout:r.timeout||t,headers:r.headers||{},queryParameters:r.queryParameters||{},cacheable:r.cacheable}}const Ut={Read:1,Write:2,Any:3},Bt={Up:1,Down:2,Timeouted:3},Vt=12e4;function Kt(e,t=Bt.Up){return{...e,status:t,lastUpdate:Date.now()}}function $t(e){return"string"==typeof e?{protocol:"https",url:e,accept:Ut.Any}:{protocol:e.protocol||"https",url:e.url,accept:e.accept||Ut.Any}}const Jt={Delete:"DELETE",Get:"GET",Post:"POST",Put:"PUT"};function zt(e,t){return Promise.all(t.map((t=>e.get(t,(()=>Promise.resolve(Kt(t))))))).then((e=>{const r=e.filter((e=>function(e){return e.status===Bt.Up||Date.now()-e.lastUpdate>Vt}(e))),n=e.filter((e=>function(e){return e.status===Bt.Timeouted&&Date.now()-e.lastUpdate<=Vt}(e))),o=[...r,...n];return{getTimeout:(e,t)=>(0===n.length&&0===e?1:n.length+3+e)*t,statelessHosts:o.length>0?o.map((e=>$t(e))):t}}))}const Wt=(e,t)=>(e=>{const t=e.status;return e.isTimedOut||(({isTimedOut:e,status:t})=>!e&&0==~~t)(e)||2!=~~(t/100)&&4!=~~(t/100)})(e)?t.onRetry(e):(({status:e})=>2==~~(e/100))(e)?t.onSuccess(e):t.onFail(e);function Qt(e,t,r,n){const o=[],a=function(e,t){if(e.method===Jt.Get||void 0===e.data&&void 0===t.data)return;const r=Array.isArray(e.data)?e.data:{...e.data,...t.data};return JSON.stringify(r)}(r,n),c=function(e,t){const r={...e.headers,...t.headers},n={};return Object.keys(r).forEach((e=>{const t=r[e];n[e.toLowerCase()]=t})),n}(e,n),i=r.method,l=r.method!==Jt.Get?{}:{...r.data,...n.data},s={"x-algolia-agent":e.userAgent.value,...e.queryParameters,...l,...n.queryParameters};let u=0;const 
f=(t,l)=>{const m=t.pop();if(void 0===m)throw{name:"RetryError",message:"Unreachable hosts - your application id may be incorrect. If the error persists, contact support@algolia.com.",transporterStackTrace:Xt(o)};const p={data:a,headers:c,method:i,url:Gt(m,r.path,s),connectTimeout:l(u,e.timeouts.connect),responseTimeout:l(u,n.timeout)},d=e=>{const r={request:p,response:e,host:m,triesLeft:t.length};return o.push(r),r},h={onSuccess:e=>function(e){try{return JSON.parse(e.content)}catch(t){throw function(e,t){return{name:"DeserializationError",message:e,response:t}}(t.message,e)}}(e),onRetry(r){const n=d(r);return r.isTimedOut&&u++,Promise.all([e.logger.info("Retryable failure",er(n)),e.hostsCache.set(m,Kt(m,r.isTimedOut?Bt.Timeouted:Bt.Down))]).then((()=>f(t,l)))},onFail(e){throw d(e),function({content:e,status:t},r){let n=e;try{n=JSON.parse(e).message}catch(o){}return function(e,t,r){return{name:"ApiError",message:e,status:t,transporterStackTrace:r}}(n,t,r)}(e,Xt(o))}};return e.requester.send(p).then((e=>Wt(e,h)))};return zt(e.hostsCache,t).then((e=>f([...e.statelessHosts].reverse(),e.getTimeout)))}function Zt(e){const t={value:`Algolia for JavaScript (${e})`,add(e){const r=`; ${e.segment}${void 0!==e.version?` (${e.version})`:""}`;return-1===t.value.indexOf(r)&&(t.value=`${t.value}${r}`),t}};return t}function Gt(e,t,r){const n=Yt(r);let o=`${e.protocol}://${e.url}/${"/"===t.charAt(0)?t.substr(1):t}`;return n.length&&(o+=`?${n}`),o}function Yt(e){return Object.keys(e).map((t=>{return Lt("%s=%s",t,(r=e[t],"[object Object]"===Object.prototype.toString.call(r)||"[object Array]"===Object.prototype.toString.call(r)?JSON.stringify(e[t]):e[t]));var r})).join("&")}function Xt(e){return e.map((e=>er(e)))}function er(e){const t=e.request.headers["x-algolia-api-key"]?{"x-algolia-api-key":"*****"}:{};return{...e,request:{...e.request,headers:{...e.request.headers,...t}}}}const tr=e=>{const t=e.appId,r=function(e,t,r){const 
n={"x-algolia-api-key":r,"x-algolia-application-id":t};return{headers:()=>e===Ht.WithinHeaders?n:{},queryParameters:()=>e===Ht.WithinQueryParameters?n:{}}}(void 0!==e.authMode?e.authMode:Ht.WithinHeaders,t,e.apiKey),n=function(e){const{hostsCache:t,logger:r,requester:n,requestsCache:o,responsesCache:a,timeouts:c,userAgent:i,hosts:l,queryParameters:s,headers:u}=e,f={hostsCache:t,logger:r,requester:n,requestsCache:o,responsesCache:a,timeouts:c,userAgent:i,headers:u,queryParameters:s,hosts:l.map((e=>$t(e))),read(e,t){const r=Ft(t,f.timeouts.read),n=()=>Qt(f,f.hosts.filter((e=>0!=(e.accept&Ut.Read))),e,r);if(!0!==(void 0!==r.cacheable?r.cacheable:e.cacheable))return n();const o={request:e,mappedRequestOptions:r,transporter:{queryParameters:f.queryParameters,headers:f.headers}};return f.responsesCache.get(o,(()=>f.requestsCache.get(o,(()=>f.requestsCache.set(o,n()).then((e=>Promise.all([f.requestsCache.delete(o),e])),(e=>Promise.all([f.requestsCache.delete(o),Promise.reject(e)]))).then((([e,t])=>t))))),{miss:e=>f.responsesCache.set(o,e)})},write:(e,t)=>Qt(f,f.hosts.filter((e=>0!=(e.accept&Ut.Write))),e,Ft(t,f.timeouts.write))};return f}({hosts:[{url:`${t}-dsn.algolia.net`,accept:Ut.Read},{url:`${t}.algolia.net`,accept:Ut.Write}].concat(Tt([{url:`${t}-1.algolianet.com`},{url:`${t}-2.algolianet.com`},{url:`${t}-3.algolianet.com`}])),...e,headers:{...r.headers(),"content-type":"application/x-www-form-urlencoded",...e.headers},queryParameters:{...r.queryParameters(),...e.queryParameters}}),o={transporter:n,appId:t,addAlgoliaAgent(e,t){n.userAgent.add({segment:e,version:t})},clearCache:()=>Promise.all([n.requestsCache.clear(),n.responsesCache.clear()]).then((()=>{}))};return _t(o,e.methods)},rr=e=>(t,r)=>t.method===Jt.Get?e.transporter.read(t,r):e.transporter.write(t,r),nr=e=>(t,r={})=>_t({transporter:e.transporter,appId:e.appId,indexName:t},r.methods),or=e=>(t,r)=>{const n=t.map((e=>({...e,params:Yt(e.params||{})})));return 
e.transporter.read({method:Jt.Post,path:"1/indexes/*/queries",data:{requests:n},cacheable:!0},r)},ar=e=>(t,r)=>Promise.all(t.map((t=>{const{facetName:n,facetQuery:o,...a}=t.params;return nr(e)(t.indexName,{methods:{searchForFacetValues:lr}}).searchForFacetValues(n,o,{...r,...a})}))),cr=e=>(t,r,n)=>e.transporter.read({method:Jt.Post,path:Lt("1/answers/%s/prediction",e.indexName),data:{query:t,queryLanguages:r},cacheable:!0},n),ir=e=>(t,r)=>e.transporter.read({method:Jt.Post,path:Lt("1/indexes/%s/query",e.indexName),data:{query:t},cacheable:!0},r),lr=e=>(t,r,n)=>e.transporter.read({method:Jt.Post,path:Lt("1/indexes/%s/facets/%s/query",e.indexName,t),data:{facetQuery:r},cacheable:!0},n),sr={Debug:1,Info:2,Error:3};function ur(e,t,r){const n={appId:e,apiKey:t,timeouts:{connect:1,read:2,write:30},requester:{send:e=>new Promise((t=>{const r=new XMLHttpRequest;r.open(e.method,e.url,!0),Object.keys(e.headers).forEach((t=>r.setRequestHeader(t,e.headers[t])));const n=(e,n)=>setTimeout((()=>{r.abort(),t({status:0,content:n,isTimedOut:!0})}),1e3*e),o=n(e.connectTimeout,"Connection timeout");let a;r.onreadystatechange=()=>{r.readyState>r.OPENED&&void 0===a&&(clearTimeout(o),a=n(e.responseTimeout,"Socket timeout"))},r.onerror=()=>{0===r.status&&(clearTimeout(o),clearTimeout(a),t({content:r.responseText||"Network request failed",status:r.status,isTimedOut:!1}))},r.onload=()=>{clearTimeout(o),clearTimeout(a),t({content:r.responseText,status:r.status,isTimedOut:!1})},r.send(e.data)}))},logger:(o=sr.Error,{debug:(e,t)=>(sr.Debug>=o&&console.debug(e,t),Promise.resolve()),info:(e,t)=>(sr.Info>=o&&console.info(e,t),Promise.resolve()),error:(e,t)=>(console.error(e,t),Promise.resolve())}),responsesCache:qt(),requestsCache:qt({serializable:!1}),hostsCache:Rt({caches:[Nt({key:`${Mt}-${e}`}),qt()]}),userAgent:Zt(Mt).add({segment:"Browser",version:"lite"}),authMode:Ht.WithinQueryParameters};var o;return 
tr({...n,...r,methods:{search:or,searchForFacetValues:ar,multipleQueries:or,multipleSearchForFacetValues:ar,customRequest:rr,initIndex:e=>t=>nr(e)(t,{methods:{search:ir,searchForFacetValues:lr,findAnswers:cr}})}})}ur.version=Mt;const fr=ur;var mr="3.3.3";function pr(){}function dr(e){return e}function hr(e,t){return e.reduce((function(e,r){var n=t(r);return e.hasOwnProperty(n)||(e[n]=[]),e[n].length<5&&e[n].push(r),e}),{})}var vr=["footer","searchBox"];function yr(){return yr=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var r=arguments[t];for(var n in r)Object.prototype.hasOwnProperty.call(r,n)&&(e[n]=r[n])}return e},yr.apply(this,arguments)}function gr(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function br(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?gr(Object(r),!0).forEach((function(t){Or(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):gr(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function Or(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function Sr(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var r=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null==r)return;var n,o,a=[],c=!0,i=!1;try{for(r=r.call(e);!(c=(n=r.next()).done)&&(a.push(n.value),!t||a.length!==t);c=!0);}catch(l){i=!0,o=l}finally{try{c||null==r.return||r.return()}finally{if(i)throw o}}return a}(e,t)||function(e,t){if(!e)return;if("string"==typeof e)return Er(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);"Object"===r&&e.constructor&&(r=e.constructor.name);if("Map"===r||"Set"===r)return 
Array.from(e);if("Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r))return Er(e,t)}(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function Er(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}function jr(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}function wr(e){var t=e.appId,r=e.apiKey,n=e.indexName,o=e.placeholder,a=void 0===o?"Search docs":o,c=e.searchParameters,i=e.onClose,l=void 0===i?pr:i,s=e.transformItems,u=void 0===s?dr:s,f=e.hitComponent,m=void 0===f?qe:f,p=e.resultsFooterComponent,d=void 0===p?function(){return null}:p,h=e.navigator,v=e.initialScrollY,y=void 0===v?0:v,g=e.transformSearchClient,b=void 0===g?dr:g,O=e.disableUserPersonalization,S=void 0!==O&&O,E=e.initialQuery,j=void 0===E?"":E,w=e.translations,P=void 0===w?{}:w,I=e.getMissingResultsUrl,D=P.footer,k=P.searchBox,C=jr(P,vr),A=Sr(Ce.useState({query:"",collections:[],completion:null,context:{},isOpen:!1,activeItemId:null,status:"idle"}),2),x=A[0],N=A[1],R=Ce.useRef(null),q=Ce.useRef(null),T=Ce.useRef(null),_=Ce.useRef(null),L=Ce.useRef(null),M=Ce.useRef(10),H=Ce.useRef("undefined"!=typeof window?window.getSelection().toString().slice(0,Ae):"").current,F=Ce.useRef(j||H).current,U=function(e,t,r){return Ce.useMemo((function(){var n=fr(e,t);return n.addAlgoliaAgent("docsearch",mr),!1===/docsearch.js 
\(.*\)/.test(n.transporter.userAgent.value)&&n.addAlgoliaAgent("docsearch-react",mr),r(n)}),[e,t,r])}(t,r,b),B=Ce.useRef(xt({key:"__DOCSEARCH_FAVORITE_SEARCHES__".concat(n),limit:10})).current,V=Ce.useRef(xt({key:"__DOCSEARCH_RECENT_SEARCHES__".concat(n),limit:0===B.getAll().length?7:4})).current,K=Ce.useCallback((function(e){if(!S){var t="content"===e.type?e.__docsearch_parent:e;t&&-1===B.getAll().findIndex((function(e){return e.objectID===t.objectID}))&&V.add(t)}}),[B,V,S]),$=Ce.useMemo((function(){return ke({id:"docsearch",defaultActiveItemId:0,placeholder:a,openOnFocus:!0,initialState:{query:F,context:{searchSuggestions:[]}},navigator:h,onStateChange:function(e){N(e.state)},getSources:function(e){var t=e.query,r=e.state,o=e.setContext,a=e.setStatus;return t?U.search([{query:t,indexName:n,params:br({attributesToRetrieve:["hierarchy.lvl0","hierarchy.lvl1","hierarchy.lvl2","hierarchy.lvl3","hierarchy.lvl4","hierarchy.lvl5","hierarchy.lvl6","content","type","url"],attributesToSnippet:["hierarchy.lvl1:".concat(M.current),"hierarchy.lvl2:".concat(M.current),"hierarchy.lvl3:".concat(M.current),"hierarchy.lvl4:".concat(M.current),"hierarchy.lvl5:".concat(M.current),"hierarchy.lvl6:".concat(M.current),"content:".concat(M.current)],snippetEllipsisText:"\u2026",highlightPreTag:"<mark>",highlightPostTag:"</mark>",hitsPerPage:20},c)}]).catch((function(e){throw"RetryError"===e.name&&a("error"),e})).then((function(e){var t=e.results[0],n=t.hits,a=t.nbHits,c=hr(n,(function(e){return lt(e)}));return r.context.searchSuggestions.length<Object.keys(c).length&&o({searchSuggestions:Object.keys(c)}),o({nbHits:a}),Object.values(c).map((function(e,t){return{sourceId:"hits".concat(t),onSelect:function(e){var t=e.item,r=e.event;K(t),r.shiftKey||r.ctrlKey||r.metaKey||l()},getItemUrl:function(e){return e.item.url},getItems:function(){return Object.values(hr(e,(function(e){return e.hierarchy.lvl1}))).map(u).map((function(e){return e.map((function(t){return 
br(br({},t),{},{__docsearch_parent:"lvl1"!==t.type&&e.find((function(e){return"lvl1"===e.type&&e.hierarchy.lvl1===t.hierarchy.lvl1}))})}))})).flat()}}}))})):S?[]:[{sourceId:"recentSearches",onSelect:function(e){var t=e.item,r=e.event;K(t),r.shiftKey||r.ctrlKey||r.metaKey||l()},getItemUrl:function(e){return e.item.url},getItems:function(){return V.getAll()}},{sourceId:"favoriteSearches",onSelect:function(e){var t=e.item,r=e.event;K(t),r.shiftKey||r.ctrlKey||r.metaKey||l()},getItemUrl:function(e){return e.item.url},getItems:function(){return B.getAll()}}]}})}),[n,c,U,l,V,B,K,F,a,h,u,S]),J=$.getEnvironmentProps,z=$.getRootProps,W=$.refresh;return function(e){var t=e.getEnvironmentProps,r=e.panelElement,n=e.formElement,o=e.inputElement;Ce.useEffect((function(){if(r&&n&&o){var e=t({panelElement:r,formElement:n,inputElement:o}),a=e.onTouchStart,c=e.onTouchMove;return window.addEventListener("touchstart",a),window.addEventListener("touchmove",c),function(){window.removeEventListener("touchstart",a),window.removeEventListener("touchmove",c)}}}),[t,r,n,o])}({getEnvironmentProps:J,panelElement:_.current,formElement:T.current,inputElement:L.current}),function(e){var t=e.container;Ce.useEffect((function(){if(t){var e=t.querySelectorAll("a[href]:not([disabled]), button:not([disabled]), input:not([disabled])"),r=e[0],n=e[e.length-1];return t.addEventListener("keydown",o),function(){t.removeEventListener("keydown",o)}}function o(e){"Tab"===e.key&&(e.shiftKey?document.activeElement===r&&(e.preventDefault(),n.focus()):document.activeElement===n&&(e.preventDefault(),r.focus()))}}),[t])}({container:R.current}),Ce.useEffect((function(){return document.body.classList.add("DocSearch--active"),function(){var e,t;document.body.classList.remove("DocSearch--active"),null===(e=(t=window).scrollTo)||void 0===e||e.call(t,0,y)}}),[]),Ce.useEffect((function(){window.matchMedia("(max-width: 
768px)").matches&&(M.current=5)}),[]),Ce.useEffect((function(){_.current&&(_.current.scrollTop=0)}),[x.query]),Ce.useEffect((function(){F.length>0&&(W(),L.current&&L.current.focus())}),[F,W]),Ce.useEffect((function(){function e(){if(q.current){var e=.01*window.innerHeight;q.current.style.setProperty("--docsearch-vh","".concat(e,"px"))}}return e(),window.addEventListener("resize",e),function(){window.removeEventListener("resize",e)}}),[]),Ce.createElement("div",yr({ref:R},z({"aria-expanded":!0}),{className:["DocSearch","DocSearch-Container","stalled"===x.status&&"DocSearch-Container--Stalled","error"===x.status&&"DocSearch-Container--Errored"].filter(Boolean).join(" "),role:"button",tabIndex:0,onMouseDown:function(e){e.target===e.currentTarget&&l()}}),Ce.createElement("div",{className:"DocSearch-Modal",ref:q},Ce.createElement("header",{className:"DocSearch-SearchBar",ref:T},Ce.createElement(Dt,yr({},$,{state:x,autoFocus:0===F.length,inputRef:L,isFromSelection:Boolean(F)&&F===H,translations:k,onClose:l}))),Ce.createElement("div",{className:"DocSearch-Dropdown",ref:_},Ce.createElement(St,yr({},$,{indexName:n,state:x,hitComponent:m,resultsFooterComponent:d,disableUserPersonalization:S,recentSearches:V,favoriteSearches:B,inputRef:L,translations:C,getMissingResultsUrl:I,onItemClick:function(e){K(e),l()}}))),Ce.createElement("footer",{className:"DocSearch-Footer"},Ce.createElement(Re,{translations:D}))))}}}]); \ No newline at end of file diff --git a/assets/js/68c835af.1e1795fe.js b/assets/js/68c835af.1e1795fe.js new file mode 100644 index 0000000..d28b76a --- /dev/null +++ b/assets/js/68c835af.1e1795fe.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2e3],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var 
n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},c=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=p(a),k=o,f=d["".concat(i,".").concat(k)]||d[k]||u[k]||r;return a?n.createElement(f,s(s({ref:t},c),{},{components:a})):n.createElement(f,s({ref:t},c))}));function f(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,s=new Array(r);s[0]=k;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[d]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},3649:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var 
n=a(7462),o=(a(7294),a(3905));const r={},s="@produces basics",l={unversionedId:"guides/Guide_21_Produces_Basics",id:"version-0.7.0/guides/Guide_21_Produces_Basics",title:"@produces basics",description:"You can use @produces decorator to produce messages to Kafka topics.",source:"@site/versioned_docs/version-0.7.0/guides/Guide_21_Produces_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_21_Produces_Basics",permalink:"/docs/0.7.0/guides/Guide_21_Produces_Basics",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Batch consuming",permalink:"/docs/0.7.0/guides/Guide_12_Batch_Consuming"},next:{title:"Defining a partition key",permalink:"/docs/0.7.0/guides/Guide_22_Partition_Keys"}},i={},p=[{value:"Import <code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a producer function and decorate it with <code>@produces</code>",id:"create-a-producer-function-and-decorate-it-with-produces",level:2},{value:"Instruct the app to start sending HelloWorld messages",id:"instruct-the-app-to-start-sending-helloworld-messages",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic",id:"check-if-the-message-was-sent-to-the-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"produces-basics"},"@produces basics"),(0,o.kt)("p",null,"You can use ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator to produce messages to Kafka topics."),(0,o.kt)("p",null,"In this guide we will create a simple FastKafka app that will 
produce\nhello world messages to hello_world topic."),(0,o.kt)("h2",{id:"import-fastkafka"},"Import ",(0,o.kt)("inlineCode",{parentName:"h2"},"FastKafka")),(0,o.kt)("p",null,"To use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator, frist we need to import the base\nFastKafka app to create our application."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,o.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,o.kt)("p",null,"Next, you need to define the structure of the messages you want to send\nto the topic using ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". For the guide\nwe\u2019ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded."),(0,o.kt)("p",null,"Let\u2019s import ",(0,o.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,o.kt)("inlineCode",{parentName:"p"},"msg")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,o.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,o.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,o.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap 
server"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,o.kt)("h2",{id:"create-a-producer-function-and-decorate-it-with-produces"},"Create a producer function and decorate it with ",(0,o.kt)("inlineCode",{parentName:"h2"},"@produces")),(0,o.kt)("p",null,"Let\u2019s create a producer function that will produce ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nto ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n")),(0,o.kt)("p",null,"Now you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic."),(0,o.kt)("p",null,"By default, the topic is determined from your function name, the \u201cto","_",'"\nprefix is stripped and what is left over is used as a topic name. 
I this\ncase, that is ',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("h2",{id:"instruct-the-app-to-start-sending-helloworld-messages"},"Instruct the app to start sending HelloWorld messages"),(0,o.kt)("p",null,"Let\u2019s use ",(0,o.kt)("inlineCode",{parentName:"p"},"@run_in_background")," decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"Your app code should look like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"run-the-app"},"Run the app"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'script_file = "producer_example.py"\ncmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"\nmd(\n f"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```"\n)\n')),(0,o.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n")),(0,o.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n")),(0,o.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic"},"Check if the message was sent to the Kafka topic"),(0,o.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic. 
In your terminal run:'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh -topic=hello_world --from-beginning -bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n")),(0,o.kt)("p",null,'You should see the {\u201cmsg": \u201cHello world!"} messages in your topic.'),(0,o.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,o.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are sending\nthe message to, this is because the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default \u201cto","_",'" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("p",null,'!!! warn "New topics"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Kafka producers and application startup will fail if the topics you are producing to don't yet exist. 
Before running the app, make sure that the topics are created.\n")),(0,o.kt)("p",null,"You can choose your custom prefix by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nproduces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(prefix="send_to_")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in produces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(topic="my_special_topic")\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("h2",{id:"message-data"},"Message data"),(0,o.kt)("p",null,"The return value from your function will be translated JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app."),(0,o.kt)("p",null,"In this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. 
In the from of:\n",(0,o.kt)("inlineCode",{parentName:"p"},'b\'{"msg": "Hello world!"}\'')))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/68d54528.f4b4a695.js b/assets/js/68d54528.f4b4a695.js new file mode 100644 index 0000000..9143723 --- /dev/null +++ b/assets/js/68d54528.f4b4a695.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7881],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>k});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function s(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?r(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):r(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function l(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)t=r[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)t=r[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=n.createContext({}),p=function(e){var a=n.useContext(i),t=a;return e&&(t="function"==typeof e?e(a):s(s({},a),e)),t},c=function(e){var a=p(e.components);return n.createElement(i.Provider,{value:a},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},d=n.forwardRef((function(e,a){var 
t=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=p(t),d=o,k=u["".concat(i,".").concat(d)]||u[d]||m[d]||r;return t?n.createElement(k,s(s({ref:a},c),{},{components:t})):n.createElement(k,s({ref:a},c))}));function k(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var r=t.length,s=new Array(r);s[0]=d;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[u]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=t[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,t)}d.displayName="MDXCreateElement"},9231:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>m,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var n=t(7462),o=(t(7294),t(3905));const r={},s="@consumes basics",l={unversionedId:"guides/Guide_11_Consumes_Basics",id:"version-0.7.0/guides/Guide_11_Consumes_Basics",title:"@consumes basics",description:"You can use @consumes decorator to consume messages from Kafka topics.",source:"@site/versioned_docs/version-0.7.0/guides/Guide_11_Consumes_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_11_Consumes_Basics",permalink:"/docs/0.7.0/guides/Guide_11_Consumes_Basics",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/0.7.0/"},next:{title:"Batch consuming",permalink:"/docs/0.7.0/guides/Guide_12_Batch_Consuming"}},i={},p=[{value:"Import <code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a consumer function and decorate it with <code>@consumes</code>",id:"create-a-consumer-function-and-decorate-it-with-consumes",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Send the message to kafka 
topic",id:"send-the-message-to-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2},{value:"Message metadata",id:"message-metadata",level:2},{value:"Create a consumer function with metadata",id:"create-a-consumer-function-with-metadata",level:3},{value:"Dealing with high latency consuming functions",id:"dealing-with-high-latency-consuming-functions",level:2}],c={toc:p},u="wrapper";function m(e){let{components:a,...t}=e;return(0,o.kt)(u,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"consumes-basics"},"@consumes basics"),(0,o.kt)("p",null,"You can use ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator to consume messages from Kafka topics."),(0,o.kt)("p",null,"In this guide we will create a simple FastKafka app that will consume\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages from hello_world topic."),(0,o.kt)("h2",{id:"import-fastkafka"},"Import ",(0,o.kt)("inlineCode",{parentName:"h2"},"FastKafka")),(0,o.kt)("p",null,"To use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator, first we need to import the base\nFastKafka app to create our application."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,o.kt)("p",null,"In this demo we will log the messages to the output so that we can\ninspect and verify that our app is consuming properly. For that we need\nto import the logger."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n")),(0,o.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,o.kt)("p",null,"Next, you need to define the structure of the messages you want to\nconsume from the topic using ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". 
For\nthe guide we\u2019ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded."),(0,o.kt)("p",null,"Let\u2019s import ",(0,o.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,o.kt)("inlineCode",{parentName:"p"},"msg")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,o.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,o.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,o.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap server"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,o.kt)("h2",{id:"create-a-consumer-function-and-decorate-it-with-consumes"},"Create a consumer function and decorate it with ",(0,o.kt)("inlineCode",{parentName:"h2"},"@consumes")),(0,o.kt)("p",null,"Let\u2019s create a consumer function that will consume ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nfrom ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic and log 
them."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,o.kt)("p",null,"The function decorated with the ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will be called\nwhen a message is produced to Kafka."),(0,o.kt)("p",null,"The message will then be injected into the typed ",(0,o.kt)("em",{parentName:"p"},"msg")," argument of the\nfunction and its type will be used to parse the message."),(0,o.kt)("p",null,"In this example case, when the message is sent into a ",(0,o.kt)("em",{parentName:"p"},"hello_world"),"\ntopic, it will be parsed into a HelloWorld class and ",(0,o.kt)("inlineCode",{parentName:"p"},"on_hello_world"),"\nfunction will be called with the parsed class as ",(0,o.kt)("em",{parentName:"p"},"msg")," argument value."),(0,o.kt)("h2",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"Your app code should look like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,o.kt)("h2",{id:"run-the-app"},"Run the app"),(0,o.kt)("p",null,"Now we can run the app. 
Copy the code above in consumer_example.py and\nrun it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,o.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[513863]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[513863]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[513863]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[513863]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 513863...\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 513863 terminated.\n")),(0,o.kt)("h2",{id:"send-the-message-to-kafka-topic"},"Send the message to kafka topic"),(0,o.kt)("p",null,"Lets send a ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'echo {\\"msg\\": \\"Hello world\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'print(consumer_task.value[1].decode("UTF-8"))\n')),(0,o.kt)("p",null,"You should see the \u201cGot msg: msg='Hello world'\" being logged by your\nconsumer."),(0,o.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,o.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are\nreceiving the message from, this is because the ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default \u201con","_",'" prefix from it\nand use the rest as the topic name. 
In this example case, the topic is\n',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("p",null,"You can choose your custom prefix by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nconsumes decorator, like this:"),(0,o.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in consumes decorator, like this:"),(0,o.kt)("h2",{id:"message-data"},"Message data"),(0,o.kt)("p",null,"The message received from kafka is translated from binary JSON\nrepresentation int the class defined by typing of ",(0,o.kt)("em",{parentName:"p"},"msg")," parameter in the\nfunction decorated by the ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator."),(0,o.kt)("p",null,"In this example case, the message will be parsed into a ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld"),"\nclass."),(0,o.kt)("h2",{id:"message-metadata"},"Message metadata"),(0,o.kt)("p",null,"If you need any of Kafka message metadata such as timestamp, partition\nor headers you can access the metadata by adding a EventMetadata typed\nargument to your consumes function and the metadata from the incoming\nmessage will be automatically injected when calling the consumes\nfunction."),(0,o.kt)("p",null,"Let\u2019s demonstrate that."),(0,o.kt)("h3",{id:"create-a-consumer-function-with-metadata"},"Create a consumer function with metadata"),(0,o.kt)("p",null,"The only difference from the original basic consume function is that we\nare now passing the ",(0,o.kt)("inlineCode",{parentName:"p"},"meta: EventMetadata")," argument to the function. The\n",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will register that and, when a message is\nconsumed, it will also pass the metadata to your function. Now you can\nuse the metadata in your consume function. 
Lets log it to see what it\ncontains."),(0,o.kt)("p",null,"First, we need to import the EventMetadata"),(0,o.kt)("p",null,"Now we can add the ",(0,o.kt)("inlineCode",{parentName:"p"},"meta")," argument to our consuming function."),(0,o.kt)("p",null,"Your final app should look like this:"),(0,o.kt)("p",null,"Now lets run the app and send a message to the broker to see the logged\nmessage metadata."),(0,o.kt)("p",null,"You should see a similar log as the one below and the metadata being\nlogged in your app."),(0,o.kt)("p",null,"As you can see in the log, from the metadata you now have the\ninformation about the partition, offset, timestamp, key and headers.\n\ud83c\udf89"),(0,o.kt)("h2",{id:"dealing-with-high-latency-consuming-functions"},"Dealing with high latency consuming functions"),(0,o.kt)("p",null,"If your functions have high latency due to, for example, lengthy\ndatabase calls you will notice a big decrease in performance. This is\ndue to the issue of how the consumes decorator executes your consume\nfunctions when consumeing events. By default, the consume function will\nrun the consuming funtions for one topic sequentially, this is the most\nstraightforward approach and results with the least amount of overhead."),(0,o.kt)("p",null,"But, to handle those high latency tasks and run them in parallel,\nFastKafka has a ",(0,o.kt)("inlineCode",{parentName:"p"},"DynamicTaskExecutor")," prepared for your consumers. 
This\nexecutor comes with additional overhead, so use it only when you need to\nhandle high latency functions."),(0,o.kt)("p",null,"Lets demonstrate how to use it."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'decorate_consumes_executor = """@app.consumes(executor="DynamicTaskExecutor")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n"""\nmd(f"```python\\n{decorate_consumes}\\n```")\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,o.kt)("p",null,"Lets send a ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'echo {\\"msg\\": \\"Hello world\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,o.kt)("p",null,"You should see the \u201cGot msg: msg='Hello world'\" being logged by your\nconsumer."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6945.a9a2d87d.js b/assets/js/6945.a9a2d87d.js new file mode 100644 index 0000000..9e29497 --- /dev/null +++ b/assets/js/6945.a9a2d87d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6945],{6945:(a,k,s)=>{s.r(k)}}]); \ No newline at end of file diff --git a/assets/js/69a9729f.c03205b8.js b/assets/js/69a9729f.c03205b8.js new file mode 100644 index 0000000..c8e9af4 --- /dev/null +++ b/assets/js/69a9729f.c03205b8.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1707],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>k});var r=n(7294);function a(e,t,n){return t in 
e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function c(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),p=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=p(e.components);return r.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,s=c(e,["components","mdxType","originalType","parentName"]),d=p(n),f=a,k=d["".concat(i,".").concat(f)]||d[f]||u[f]||o;return n?r.createElement(k,l(l({ref:t},s),{},{components:n})):r.createElement(k,l({ref:t},s))}));function k(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=f;var c={};for(var i in t)hasOwnProperty.call(t,i)&&(c[i]=t[i]);c.originalType=e,c[d]="string"==typeof e?e:a,l[1]=c;for(var p=2;p<o;p++)l[p]=n[p];return r.createElement.apply(null,l)}return 
r.createElement.apply(null,n)}f.displayName="MDXCreateElement"},1506:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>c,toc:()=>p});var r=n(7462),a=(n(7294),n(3905));const o={},l=void 0,c={unversionedId:"api/fastkafka/encoder/json_encoder",id:"api/fastkafka/encoder/json_encoder",title:"json_encoder",description:"jsonencoder {fastkafka.encoder.jsonencoder}",source:"@site/docs/api/fastkafka/encoder/json_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_encoder",permalink:"/docs/next/api/fastkafka/encoder/json_encoder",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_decoder",permalink:"/docs/next/api/fastkafka/encoder/json_decoder"},next:{title:"DynamicTaskExecutor",permalink:"/docs/next/api/fastkafka/executors/DynamicTaskExecutor"}},i={},p=[{value:"json_encoder",id:"fastkafka.encoder.json_encoder",level:3}],s={toc:p},d="wrapper";function u(e){let{components:t,...n}=e;return(0,a.kt)(d,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h3",{id:"fastkafka.encoder.json_encoder"},"json_encoder"),(0,a.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/encoder/json.py#L28-L38",class:"link-to-source",target:"_blank"},"View source"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-py"},"json_encoder(\n msg\n)\n")),(0,a.kt)("p",null,"Encoder to encode pydantic instances to json 
string"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Type"),(0,a.kt)("th",{parentName:"tr",align:null},"Description"),(0,a.kt)("th",{parentName:"tr",align:null},"Default"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("inlineCode",{parentName:"td"},"msg")),(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("inlineCode",{parentName:"td"},"BaseModel")),(0,a.kt)("td",{parentName:"tr",align:null},"An instance of pydantic basemodel"),(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("em",{parentName:"td"},"required"))))),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Type"),(0,a.kt)("th",{parentName:"tr",align:null},"Description"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("inlineCode",{parentName:"td"},"bytes")),(0,a.kt)("td",{parentName:"tr",align:null},"Json string in bytes which is encoded from pydantic basemodel")))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6af17b1d.d647c97a.js b/assets/js/6af17b1d.d647c97a.js new file mode 100644 index 0000000..0b7cb39 --- /dev/null +++ b/assets/js/6af17b1d.d647c97a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5045],{3905:(e,t,a)=>{a.d(t,{Zo:()=>p,kt:()=>k});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var 
n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),u=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},h="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),h=u(a),c=o,k=h["".concat(s,".").concat(c)]||h[c]||d[c]||i;return a?n.createElement(k,r(r({ref:t},p),{},{components:a})):n.createElement(k,r({ref:t},p))}));function k(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=a.length,r=new Array(i);r[0]=c;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[h]="string"==typeof e?e:o,r[1]=l;for(var u=2;u<i;u++)r[u]=a[u];return n.createElement.apply(null,r)}return n.createElement.apply(null,a)}c.displayName="MDXCreateElement"},5276:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var 
n=a(7462),o=(a(7294),a(3905));const i={},r="Contributing to fastkafka",l={unversionedId:"CONTRIBUTING",id:"version-0.7.0/CONTRIBUTING",title:"Contributing to fastkafka",description:"First off, thanks for taking the time to contribute! \u2764\ufe0f",source:"@site/versioned_docs/version-0.7.0/CONTRIBUTING.md",sourceDirName:".",slug:"/CONTRIBUTING",permalink:"/docs/0.7.0/CONTRIBUTING",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LICENSE",permalink:"/docs/0.7.0/LICENSE"},next:{title:"Release notes",permalink:"/docs/0.7.0/CHANGELOG"}},s={},u=[{value:"Table of Contents",id:"table-of-contents",level:2},{value:"I Have a Question",id:"i-have-a-question",level:2},{value:"I Want To Contribute",id:"i-want-to-contribute",level:2},{value:"Reporting Bugs",id:"reporting-bugs",level:3},{value:"Before Submitting a Bug Report",id:"before-submitting-a-bug-report",level:4},{value:"How Do I Submit a Good Bug Report?",id:"how-do-i-submit-a-good-bug-report",level:4},{value:"Suggesting Enhancements",id:"suggesting-enhancements",level:3},{value:"Before Submitting an Enhancement",id:"before-submitting-an-enhancement",level:4},{value:"How Do I Submit a Good Enhancement Suggestion?",id:"how-do-i-submit-a-good-enhancement-suggestion",level:4},{value:"Your First Code Contribution",id:"your-first-code-contribution",level:3},{value:"Development",id:"development",level:2},{value:"Prepare the dev environment",id:"prepare-the-dev-environment",level:3},{value:"Clone the fastkafka repository",id:"clone-the-fastkafka-repository",level:4},{value:"Optional: create a virtual python environment",id:"optional-create-a-virtual-python-environment",level:4},{value:"Install fastkafka",id:"install-fastkafka",level:4},{value:"Install JRE and Kafka toolkit",id:"install-jre-and-kafka-toolkit",level:4},{value:"Install npm",id:"install-npm",level:4},{value:"Install docusaurus",id:"install-docusaurus",level:4},{value:"Check if everything 
works",id:"check-if-everything-works",level:4},{value:"Way of working",id:"way-of-working",level:3},{value:"Before a PR",id:"before-a-pr",level:3},{value:"Attribution",id:"attribution",level:2}],p={toc:u},h="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(h,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"contributing-to-fastkafka"},"Contributing to fastkafka"),(0,o.kt)("p",null,"First off, thanks for taking the time to contribute! \u2764\ufe0f"),(0,o.kt)("p",null,"All types of contributions are encouraged and valued. See the ",(0,o.kt)("a",{parentName:"p",href:"#table-of-contents"},"Table of Contents")," for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. \ud83c\udf89"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"And if you like the project, but just don't have time to contribute, that's fine. 
There are other easy ways to support the project and show your appreciation, which we would also be very happy about:"),(0,o.kt)("ul",{parentName:"blockquote"},(0,o.kt)("li",{parentName:"ul"},"Star the project"),(0,o.kt)("li",{parentName:"ul"},"Tweet about it"),(0,o.kt)("li",{parentName:"ul"},"Refer this project in your project's readme"),(0,o.kt)("li",{parentName:"ul"},"Mention the project at local meetups and tell your friends/colleagues"))),(0,o.kt)("h2",{id:"table-of-contents"},"Table of Contents"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"I Have a Question")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#reporting-bugs"},"Reporting Bugs")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#suggesting-enhancements"},"Suggesting Enhancements")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#your-first-code-contribution"},"Your First Code Contribution")))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#development"},"Development"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#prepare-the-dev-environment"},"Prepare the dev environment")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#way-of-working"},"Way of working")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#before-a-pr"},"Before a PR"))))),(0,o.kt)("h2",{id:"i-have-a-question"},"I Have a Question"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"If you want to ask a question, we assume that you have read the available ",(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/docs"},"Documentation"),".")),(0,o.kt)("p",null,"Before you ask a question, it is best to search for existing 
",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"Issues")," that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue."),(0,o.kt)("p",null,"If you then still feel the need to ask a question and need clarification, we recommend the following:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord")),(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Provide as much context as you can about what you're running into")))),(0,o.kt)("p",null,"We will then take care of the issue as soon as possible."),(0,o.kt)("h2",{id:"i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("blockquote",null,(0,o.kt)("h3",{parentName:"blockquote",id:"legal-notice"},"Legal Notice"),(0,o.kt)("p",{parentName:"blockquote"},"When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.")),(0,o.kt)("h3",{id:"reporting-bugs"},"Reporting Bugs"),(0,o.kt)("h4",{id:"before-submitting-a-bug-report"},"Before Submitting a Bug Report"),(0,o.kt)("p",null,"A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Determine if your bug is really a bug and not an error on your side e.g. 
using incompatible environment components/versions (Make sure that you have read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation"),". If you are looking for support, you might want to check ",(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"this section"),")."),(0,o.kt)("li",{parentName:"ul"},"To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues?q=label%3Abug"},"bug tracker"),"."),(0,o.kt)("li",{parentName:"ul"},"Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue."),(0,o.kt)("li",{parentName:"ul"},"Collect information about the bug:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Stack trace (Traceback)"),(0,o.kt)("li",{parentName:"ul"},"OS, Platform and Version (Windows, Linux, macOS, x86, ARM)"),(0,o.kt)("li",{parentName:"ul"},"Python version"),(0,o.kt)("li",{parentName:"ul"},"Possibly your input and the output"),(0,o.kt)("li",{parentName:"ul"},"Can you reliably reproduce the issue? And can you also reproduce it with older versions?")))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-bug-report"},"How Do I Submit a Good Bug Report?"),(0,o.kt)("p",null,"We use GitHub issues to track bugs and errors. If you run into an issue with the project:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),". 
(Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)"),(0,o.kt)("li",{parentName:"ul"},"Explain the behavior you would expect and the actual behavior."),(0,o.kt)("li",{parentName:"ul"},"Please provide as much context as possible and describe the ",(0,o.kt)("em",{parentName:"li"},"reproduction steps")," that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case."),(0,o.kt)("li",{parentName:"ul"},"Provide the information you collected in the previous section.")),(0,o.kt)("p",null,"Once it's filed:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"The project team will label the issue accordingly."),(0,o.kt)("li",{parentName:"ul"},"A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro"),". Bugs with the ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro")," tag will not be addressed until they are reproduced."),(0,o.kt)("li",{parentName:"ul"},"If the team is able to reproduce the issue, it will be marked ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-fix"),", as well as possibly other tags (such as ",(0,o.kt)("inlineCode",{parentName:"li"},"critical"),"), and the issue will be left to be implemented.")),(0,o.kt)("h3",{id:"suggesting-enhancements"},"Suggesting Enhancements"),(0,o.kt)("p",null,"This section guides you through submitting an enhancement suggestion for fastkafka, ",(0,o.kt)("strong",{parentName:"p"},"including completely new features and minor improvements to existing functionality"),". 
Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions."),(0,o.kt)("h4",{id:"before-submitting-an-enhancement"},"Before Submitting an Enhancement"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation")," carefully and find out if the functionality is already covered, maybe by an individual configuration."),(0,o.kt)("li",{parentName:"ul"},"Perform a ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues"},"search")," to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one."),(0,o.kt)("li",{parentName:"ul"},"Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library."),(0,o.kt)("li",{parentName:"ul"},"If you are not sure or would like to discuiss the enhancement with us directly, you can always contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord"))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-enhancement-suggestion"},"How Do I Submit a Good Enhancement Suggestion?"),(0,o.kt)("p",null,"Enhancement suggestions are tracked as ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"GitHub issues"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Use a ",(0,o.kt)("strong",{parentName:"li"},"clear and descriptive title")," for the issue to identify the suggestion."),(0,o.kt)("li",{parentName:"ul"},"Provide a ",(0,o.kt)("strong",{parentName:"li"},"step-by-step description of the suggested enhancement")," in as many details as possible."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Describe the current behavior")," and ",(0,o.kt)("strong",{parentName:"li"},"explain which behavior you expected to see instead")," and why. At this point you can also tell which alternatives do not work for you."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Explain why this enhancement would be useful")," to most fastkafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.")),(0,o.kt)("h3",{id:"your-first-code-contribution"},"Your First Code Contribution"),(0,o.kt)("p",null,'A great way to start contributing to FastKafka would be by solving an issue tagged with "good first issue". 
To find a list of issues that are tagged as "good first issue" and are suitable for newcomers, please visit the following link: ',(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/labels/good%20first%20issue"},"Good first issues")),(0,o.kt)("p",null,"These issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in ",(0,o.kt)("a",{parentName:"p",href:"#way-of-working"},"Way of working")," and ",(0,o.kt)("a",{parentName:"p",href:"#before-a-pr"},"Before a PR"),", and help us make FastKafka even better!"),(0,o.kt)("p",null,"If you have any questions or need further assistance, feel free to reach out to us. Happy coding!"),(0,o.kt)("h2",{id:"development"},"Development"),(0,o.kt)("h3",{id:"prepare-the-dev-environment"},"Prepare the dev environment"),(0,o.kt)("p",null,"To start contributing to fastkafka, you first have to prepare the development environment."),(0,o.kt)("h4",{id:"clone-the-fastkafka-repository"},"Clone the fastkafka repository"),(0,o.kt)("p",null,"To clone the repository, run the following command in the CLI:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"git clone https://github.com/airtai/fastkafka.git\n")),(0,o.kt)("h4",{id:"optional-create-a-virtual-python-environment"},"Optional: create a virtual python environment"),(0,o.kt)("p",null,"To prevent library version clashes with you other projects, it is reccomended that you create a virtual python environment for your fastkafka project by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"python3 -m venv fastkafka-env\n")),(0,o.kt)("p",null,"And to activate your virtual environment run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"source fastkafka-env/bin/activate\n")),(0,o.kt)("p",null,"To learn more about virtual environments, please 
have a look at ",(0,o.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available."},"official python documentation")),(0,o.kt)("h4",{id:"install-fastkafka"},"Install fastkafka"),(0,o.kt)("p",null,"To install fastkafka, navigate to the root directory of the cloned fastkafka project and run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'pip install fastkafka -e [."dev"]\n')),(0,o.kt)("h4",{id:"install-jre-and-kafka-toolkit"},"Install JRE and Kafka toolkit"),(0,o.kt)("p",null,"To be able to run tests and use all the functionalities of fastkafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka testing install-deps")," CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install JRE and Kafka manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.oracle.com/javase/9/install/toc.htm"},"JDK and JRE installation guide")," and ",(0,o.kt)("a",{parentName:"li",href:"https://kafka.apache.org/quickstart"},"Apache Kafka quickstart"))),(0,o.kt)("h4",{id:"install-npm"},"Install npm"),(0,o.kt)("p",null,"To be able to run tests you must have npm installed, because of documentation generation. 
To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka docs install_deps")," CLI command which will install npm for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install npm manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.npmjs.com/downloading-and-installing-node-js-and-npm"},"NPM installation guide"))),(0,o.kt)("h4",{id:"install-docusaurus"},"Install docusaurus"),(0,o.kt)("p",null,"To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of fastkafka project."),(0,o.kt)("h4",{id:"check-if-everything-works"},"Check if everything works"),(0,o.kt)("p",null,"After installing fastkafka and all the necessary dependencies, run ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev_test"),' in the root of fastkafka project. This will take a couple of minutes as it will run all the tests on fastkafka project. If everythng is setup correctly, you will get a "Success." message in your terminal, otherwise please refer to previous steps.'),(0,o.kt)("h3",{id:"way-of-working"},"Way of working"),(0,o.kt)("p",null,"The development of fastkafka is done in Jupyter notebooks. 
Inside the ",(0,o.kt)("inlineCode",{parentName:"p"},"nbs")," directory you will find all the source code of fastkafka, this is where you will implement your changes."),(0,o.kt)("p",null,"The testing, cleanup and exporting of the code is being handled by ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev"),", please, before starting the work on fastkafka, get familiar with it by reading ",(0,o.kt)("a",{parentName:"p",href:"https://nbdev.fast.ai/getting_started.html"},"nbdev documentation"),"."),(0,o.kt)("p",null,"The general philosopy you should follow when writing code for fastkafka is:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Function should be an atomic functionality, short and concise",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Good rule of thumb: your function should be 5-10 lines long usually"))),(0,o.kt)("li",{parentName:"ul"},"If there are more than 2 params, enforce keywording using *",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"E.g.: ",(0,o.kt)("inlineCode",{parentName:"li"},"def function(param1, *, param2, param3): ...")))),(0,o.kt)("li",{parentName:"ul"},"Define typing of arguments and return value",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected"))),(0,o.kt)("li",{parentName:"ul"},"After the function cell, write test cells using the assert keyword",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Whenever you implement something you should test that functionality immediately in the cells below "))),(0,o.kt)("li",{parentName:"ul"},"Add Google style python docstrings when function is implemented and tested")),(0,o.kt)("h3",{id:"before-a-pr"},"Before a PR"),(0,o.kt)("p",null,"After you have implemented your changes you will want to open a pull request to merge those changes into our main branch. 
To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of fastkafka project):"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Format your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbqa black nbs")),(0,o.kt)("li",{parentName:"ol"},"Close, shutdown, and clean the metadata from your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_clean")),(0,o.kt)("li",{parentName:"ol"},"Export your code: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_export")),(0,o.kt)("li",{parentName:"ol"},"Run the tests: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_test")),(0,o.kt)("li",{parentName:"ol"},"Test code typing: ",(0,o.kt)("inlineCode",{parentName:"li"},"mypy fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with bandit: ",(0,o.kt)("inlineCode",{parentName:"li"},"bandit -r fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with semgrep: ",(0,o.kt)("inlineCode",{parentName:"li"},"semgrep --config auto -r fastkafka"))),(0,o.kt)("p",null,"When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everythng is in order, we will approve your merge."),(0,o.kt)("h2",{id:"attribution"},"Attribution"),(0,o.kt)("p",null,"This guide is based on the ",(0,o.kt)("strong",{parentName:"p"},"contributing-gen"),". 
",(0,o.kt)("a",{parentName:"p",href:"https://github.com/bttger/contributing-gen"},"Make your own"),"!"))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6b76d411.86c99a48.js b/assets/js/6b76d411.86c99a48.js new file mode 100644 index 0000000..6f67122 --- /dev/null +++ b/assets/js/6b76d411.86c99a48.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7083],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>m});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},d=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var 
a=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=l(a),u=i,m=c["".concat(p,".").concat(u)]||c[u]||k[u]||r;return a?n.createElement(m,o(o({ref:t},d),{},{components:a})):n.createElement(m,o({ref:t},d))}));function m(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=a.length,o=new Array(r);o[0]=u;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[c]="string"==typeof e?e:i,o[1]=s;for(var l=2;l<r;l++)o[l]=a[l];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},3613:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var n=a(7462),i=(a(7294),a(3905));const r={},o="Using Redpanda to test FastKafka",s={unversionedId:"guides/Guide_31_Using_redpanda_to_test_fastkafka",id:"version-0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka",title:"Using Redpanda to test FastKafka",description:"What is FastKafka?",source:"@site/versioned_docs/version-0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_31_Using_redpanda_to_test_fastkafka",permalink:"/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using multiple Kafka clusters",permalink:"/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters"},next:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow"}},p={},l=[{value:"What is FastKafka?",id:"what-is-fastkafka",level:2},{value:"What is Redpanda?",id:"what-is-redpanda",level:2},{value:"Example repo",id:"example-repo",level:2},{value:"The process",id:"the-process",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:2},{value:"2. 
Cloning and setting up the example repo",id:"2-cloning-and-setting-up-the-example-repo",level:2},{value:"Create a virtual environment",id:"create-a-virtual-environment",level:3},{value:"Install Python dependencies",id:"install-python-dependencies",level:3},{value:"3. Writing server code",id:"3-writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"4. Writing the test code",id:"4-writing-the-test-code",level:2},{value:"5. Running the tests",id:"5-running-the-tests",level:2},{value:"Recap",id:"recap",level:3}],d={toc:l},c="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-redpanda-to-test-fastkafka"},"Using Redpanda to test FastKafka"),(0,i.kt)("h2",{id:"what-is-fastkafka"},"What is FastKafka?"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,i.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,i.kt)("h2",{id:"what-is-redpanda"},"What is Redpanda?"),(0,i.kt)("p",null,"Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda."),(0,i.kt)("p",null,"From ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"redpanda.com"),":"),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"Redpanda is a Kafka\xae-compatible streaming data platform that is proven\nto be 10x faster and 6x lower in total costs. 
It is also JVM-free,\nZooKeeper\xae-free, Jepsen-tested and source available.")),(0,i.kt)("p",null,"Some of the advantages of Redpanda over Kafka are"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A single binary with built-in everything, no ZooKeeper\xae or JVM\nneeded."),(0,i.kt)("li",{parentName:"ol"},"Costs upto 6X less than Kafka."),(0,i.kt)("li",{parentName:"ol"},"Up to 10x lower average latencies and up to 6x faster Kafka\ntransactions without compromising correctness.")),(0,i.kt)("p",null,"To learn more about Redpanda, please visit their\n",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"website")," or checkout this ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark"},"blog\npost"),"\ncomparing Redpanda and Kafka\u2019s performance benchmarks."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A sample fastkafka-based library that uses Redpanda for testing, based\non this guide, can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"here"),"."),(0,i.kt)("h2",{id:"the-process"},"The process"),(0,i.kt)("p",null,"Here are the steps we\u2019ll be walking through to build our example:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Set up the prerequisites."),(0,i.kt)("li",{parentName:"ol"},"Clone the example repo."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write an application using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write a test case to test FastKafka with Redpanda."),(0,i.kt)("li",{parentName:"ol"},"Run the test case and produce/consume messages.")),(0,i.kt)("h2",{id:"1-prerequisites"},"1. Prerequisites"),(0,i.kt)("p",null,"Before starting, make sure you have the following prerequisites set up:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Python 3.x"),": A Python 3.x installation is required to run\nFastKafka. 
You can download the latest version of Python from the\n",(0,i.kt)("a",{parentName:"li",href:"https://www.python.org/downloads/"},"official website"),". You\u2019ll also\nneed to have pip installed and updated, which is Python\u2019s package\ninstaller."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Docker Desktop"),": Docker is used to run Redpanda, which is\nrequired for testing FastKafka. You can download and install Docker\nDesktop from the ",(0,i.kt)("a",{parentName:"li",href:"https://www.docker.com/products/docker-desktop/"},"official\nwebsite"),"."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Git"),": You\u2019ll need to have Git installed to clone the example\nrepo. You can download Git from the ",(0,i.kt)("a",{parentName:"li",href:"https://git-scm.com/downloads"},"official\nwebsite"),".")),(0,i.kt)("h2",{id:"2-cloning-and-setting-up-the-example-repo"},"2. Cloning and setting up the example repo"),(0,i.kt)("p",null,"To get started with the example code, clone the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"GitHub\nrepository")," by\nrunning the following command in your terminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n")),(0,i.kt)("p",null,"This will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files."),(0,i.kt)("h3",{id:"create-a-virtual-environment"},"Create a virtual environment"),(0,i.kt)("p",null,"Before writing any code, let\u2019s ",(0,i.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#module-venv"},"create a new virtual\nenvironment"),"\nfor our project."),(0,i.kt)("p",null,"A virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between 
different projects."),(0,i.kt)("p",null,"To create a new virtual environment, run the following commands in your\nterminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"python3 -m venv venv\n")),(0,i.kt)("p",null,"This will create a new directory called ",(0,i.kt)("inlineCode",{parentName:"p"},"venv")," in your project\ndirectory, which will contain the virtual environment."),(0,i.kt)("p",null,"To activate the virtual environment, run the following command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"source venv/bin/activate\n")),(0,i.kt)("p",null,"This will change your shell\u2019s prompt to indicate that you are now\nworking inside the virtual environment."),(0,i.kt)("p",null,"Finally, run the following command to upgrade ",(0,i.kt)("inlineCode",{parentName:"p"},"pip"),", the Python package\ninstaller:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install --upgrade pip\n")),(0,i.kt)("h3",{id:"install-python-dependencies"},"Install Python dependencies"),(0,i.kt)("p",null,"Next, let\u2019s install the required Python dependencies. In this guide,\nwe\u2019ll be using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nto write our application code and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest-asyncio")," to test\nit."),(0,i.kt)("p",null,"You can install the dependencies from the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file\nprovided in the cloned repository by running:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install -r requirements.txt\n")),(0,i.kt)("p",null,"This will install all the required packages and their dependencies."),(0,i.kt)("h2",{id:"3-writing-server-code"},"3. 
Writing server code"),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:"),(0,i.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,i.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model."),(0,i.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,i.kt)("h3",{id:"messages"},"Messages"),(0,i.kt)("p",null,"FastKafka uses ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,i.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,i.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/types/#constrained-types"},(0,i.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,i.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,i.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("h3",{id:"application"},"Application"),(0,i.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,i.kt)("p",null,"It starts by defining a dictionary called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,i.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,i.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker."),(0,i.kt)("p",null,"Next, an instance of the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum required arguments:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generating documentation")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,i.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,i.kt)("p",null,"FastKafka provides convenient function decorators ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,i.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,i.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,i.kt)("p",null,"This following example shows how to use the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,i.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,i.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h2",{id:"4-writing-the-test-code"},"4. Writing the test code"),(0,i.kt)("p",null,"The service can be tested using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstance which can be configured to start a ",(0,i.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/LocalRedpandaBroker/"},"Redpanda\nbroker")," for testing\npurposes. 
The ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file in the cloned repository contains the\nfollowing code for testing."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n # Start Tester app and create local Redpanda broker for testing\n async with Tester(kafka_app).using_local_redpanda(\n tag="v23.1.2", listener_port=9092\n ) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nmodule utilizes uses\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker/#fastkafka.testing.LocalRedpandaBroker"},(0,i.kt)("inlineCode",{parentName:"a"},"LocalRedpandaBroker")),"\nto start and stop a Redpanda broker for testing purposes using Docker"),(0,i.kt)("h2",{id:"5-running-the-tests"},"5. Running the tests"),(0,i.kt)("p",null,"We can run the tests which is in ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file by executing the\nfollowing command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pytest test.py\n")),(0,i.kt)("p",null,"This will start a Redpanda broker using Docker and executes tests. 
The\noutput of the command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item \n\ntest.py . [100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n")),(0,i.kt)("p",null,"Running the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable."),(0,i.kt)("h3",{id:"recap"},"Recap"),(0,i.kt)("p",null,"We have created an Iris classification model and encapulated it into our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napplication. 
The app will consume the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," from the\n",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,i.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,i.kt)("p",null,"To test the app we have:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Created the app")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Started our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nclass with ",(0,i.kt)("inlineCode",{parentName:"p"},"Redpanda")," broker which mirrors the developed app topics\nfor testing purposes")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Sent ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message to ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message"))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6c174e6d.40d06062.js b/assets/js/6c174e6d.40d06062.js new file mode 100644 index 0000000..701f5ac --- /dev/null +++ b/assets/js/6c174e6d.40d06062.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9511],{3905:(e,r,t)=>{t.d(r,{Zo:()=>s,kt:()=>k});var n=t(7294);function a(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function o(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);r&&(n=n.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,n)}return t}function c(e){for(var r=1;r<arguments.length;r++){var 
t=null!=arguments[r]?arguments[r]:{};r%2?o(Object(t),!0).forEach((function(r){a(e,r,t[r])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):o(Object(t)).forEach((function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r))}))}return e}function i(e,r){if(null==e)return{};var t,n,a=function(e,r){if(null==e)return{};var t,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)t=o[n],r.indexOf(t)>=0||(a[t]=e[t]);return a}(e,r);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)t=o[n],r.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var d=n.createContext({}),l=function(e){var r=n.useContext(d),t=r;return e&&(t="function"==typeof e?e(r):c(c({},r),e)),t},s=function(e){var r=l(e.components);return n.createElement(d.Provider,{value:r},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var r=e.children;return n.createElement(n.Fragment,{},r)}},u=n.forwardRef((function(e,r){var t=e.components,a=e.mdxType,o=e.originalType,d=e.parentName,s=i(e,["components","mdxType","originalType","parentName"]),p=l(t),u=a,k=p["".concat(d,".").concat(u)]||p[u]||f[u]||o;return t?n.createElement(k,c(c({ref:r},s),{},{components:t})):n.createElement(k,c({ref:r},s))}));function k(e,r){var t=arguments,a=r&&r.mdxType;if("string"==typeof e||a){var o=t.length,c=new Array(o);c[0]=u;var i={};for(var d in r)hasOwnProperty.call(r,d)&&(i[d]=r[d]);i.originalType=e,i[p]="string"==typeof e?e:a,c[1]=i;for(var l=2;l<o;l++)c[l]=t[l];return n.createElement.apply(null,c)}return n.createElement.apply(null,t)}u.displayName="MDXCreateElement"},2174:(e,r,t)=>{t.r(r),t.d(r,{assets:()=>d,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>i,toc:()=>l});var n=t(7462),a=(t(7294),t(3905));const o={},c=void 
0,i={unversionedId:"api/fastkafka/encoder/avro_encoder",id:"version-0.6.0/api/fastkafka/encoder/avro_encoder",title:"avro_encoder",description:"fastkafka.encoder.avroencoder {fastkafka.encoder.avroencoder}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/avro_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_encoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/avro_encoder",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_decoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/avro_decoder"},next:{title:"avsc_to_pydantic",permalink:"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic"}},d={},l=[{value:"<code>fastkafka.encoder.avro_encoder</code>",id:"fastkafka.encoder.avro_encoder",level:2},{value:"<code>avro_encoder</code>",id:"avro_encoder",level:3}],s={toc:l},p="wrapper";function f(e){let{components:r,...t}=e;return(0,a.kt)(p,(0,n.Z)({},s,t,{components:r,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.encoder.avro_encoder"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avro_encoder")),(0,a.kt)("h3",{id:"avro_encoder"},(0,a.kt)("inlineCode",{parentName:"h3"},"avro_encoder")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def avro_encoder(msg: pydantic.main.BaseModel) -> bytes")),(0,a.kt)("p",null,"Encoder to encode pydantic instances to avro message"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"msg"),": An instance of pydantic basemodel")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"A bytes message which is encoded from pydantic basemodel")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6c450cd6.73253a7e.js b/assets/js/6c450cd6.73253a7e.js new file mode 100644 index 0000000..82c9ac8 --- /dev/null +++ 
b/assets/js/6c450cd6.73253a7e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7750],{3905:(e,n,a)=>{a.d(n,{Zo:()=>l,kt:()=>f});var t=a(7294);function s(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function i(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function o(e){for(var n=1;n<arguments.length;n++){var a=null!=arguments[n]?arguments[n]:{};n%2?i(Object(a),!0).forEach((function(n){s(e,n,a[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(a,n))}))}return e}function r(e,n){if(null==e)return{};var a,t,s=function(e,n){if(null==e)return{};var a,t,s={},i=Object.keys(e);for(t=0;t<i.length;t++)a=i[t],n.indexOf(a)>=0||(s[a]=e[a]);return s}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t<i.length;t++)a=i[t],n.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(s[a]=e[a])}return s}var d=t.createContext({}),c=function(e){var n=t.useContext(d),a=n;return e&&(a="function"==typeof e?e(n):o(o({},n),e)),a},l=function(e){var n=c(e.components);return t.createElement(d.Provider,{value:n},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,s=e.mdxType,i=e.originalType,d=e.parentName,l=r(e,["components","mdxType","originalType","parentName"]),p=c(a),u=s,f=p["".concat(d,".").concat(u)]||p[u]||m[u]||i;return a?t.createElement(f,o(o({ref:n},l),{},{components:a})):t.createElement(f,o({ref:n},l))}));function f(e,n){var a=arguments,s=n&&n.mdxType;if("string"==typeof e||s){var i=a.length,o=new 
Array(i);o[0]=u;var r={};for(var d in n)hasOwnProperty.call(n,d)&&(r[d]=n[d]);r.originalType=e,r[p]="string"==typeof e?e:s,o[1]=r;for(var c=2;c<i;c++)o[c]=a[c];return t.createElement.apply(null,o)}return t.createElement.apply(null,a)}u.displayName="MDXCreateElement"},5931:(e,n,a)=>{a.r(n),a.d(n,{assets:()=>d,contentTitle:()=>o,default:()=>m,frontMatter:()=>i,metadata:()=>r,toc:()=>c});var t=a(7462),s=(a(7294),a(3905));const i={},o="Encoding and Decoding Kafka Messages with FastKafka",r={unversionedId:"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",id:"version-0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",title:"Encoding and Decoding Kafka Messages with FastKafka",description:"Prerequisites",source:"@site/versioned_docs/version-0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",permalink:"/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Lifespan Events",permalink:"/docs/0.6.0/guides/Guide_05_Lifespan_Handler"},next:{title:"Using Redpanda to test FastKafka",permalink:"/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka"}},d={},c=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Ways to Encode and Decode Messages with FastKafka",id:"ways-to-encode-and-decode-messages-with-fastkafka",level:2},{value:"1. Json encoder and decoder",id:"1-json-encoder-and-decoder",level:2},{value:"2. 
Avro encoder and decoder",id:"2-avro-encoder-and-decoder",level:2},{value:"What is Avro?",id:"what-is-avro",level:3},{value:"Installing FastKafka with Avro dependencies",id:"installing-fastkafka-with-avro-dependencies",level:3},{value:"Defining Avro Schema Using Pydantic Models",id:"defining-avro-schema-using-pydantic-models",level:3},{value:"Reusing existing avro schema",id:"reusing-existing-avro-schema",level:3},{value:"Building pydantic models from avro schema dictionary",id:"building-pydantic-models-from-avro-schema-dictionary",level:4},{value:"Building pydantic models from <code>.avsc</code> file",id:"building-pydantic-models-from-avsc-file",level:4},{value:"Consume/Produce avro messages with FastKafka",id:"consumeproduce-avro-messages-with-fastkafka",level:3},{value:"Assembling it all together",id:"assembling-it-all-together",level:3},{value:"3. Custom encoder and decoder",id:"3-custom-encoder-and-decoder",level:2},{value:"Writing a custom encoder and decoder",id:"writing-a-custom-encoder-and-decoder",level:3},{value:"Assembling it all together",id:"assembling-it-all-together-1",level:3}],l={toc:c},p="wrapper";function m(e){let{components:n,...a}=e;return(0,s.kt)(p,(0,t.Z)({},l,a,{components:n,mdxType:"MDXLayout"}),(0,s.kt)("h1",{id:"encoding-and-decoding-kafka-messages-with-fastkafka"},"Encoding and Decoding Kafka Messages with FastKafka"),(0,s.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,s.kt)("ol",null,(0,s.kt)("li",{parentName:"ol"},"A basic knowledge of\n",(0,s.kt)("a",{parentName:"li",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nis needed to proceed with this guide. 
If you are not familiar with\n",(0,s.kt)("a",{parentName:"li",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),",\nplease go through the ",(0,s.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("a",{parentName:"li",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith its dependencies installed is needed. Please install\n",(0,s.kt)("a",{parentName:"li",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nusing the command - ",(0,s.kt)("inlineCode",{parentName:"li"},"pip install fastkafka"))),(0,s.kt)("h2",{id:"ways-to-encode-and-decode-messages-with-fastkafka"},"Ways to Encode and Decode Messages with FastKafka"),(0,s.kt)("p",null,"In python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. 
imilarly, while consuming messages, we consume them as bytes and\nthen convert them to strings."),(0,s.kt)("p",null,"In FastKafka, we specify message schema using Pydantic models as\nmentioned in ",(0,s.kt)("a",{parentName:"p",href:"/docs#messages"},"tutorial"),":"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,s.kt)("p",null,"Then, we send and receive messages as instances of Pydantic models which\nwe defined. So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics."),(0,s.kt)("p",null,"The ",(0,s.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@produces")," methods of FastKafka accept a parameter\ncalled ",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:"),(0,s.kt)("ol",null,(0,s.kt)("li",{parentName:"ol"},"json - This is the default encoder/decoder option in FastKafka.\nWhile producing, this option converts our instance of Pydantic model\nmessages to a JSON string and then converts it to bytes before\nsending it to the topic. 
While consuming, it converts bytes to a\nJSON string and then constructs an instance of Pydantic model from\nthe JSON string."),(0,s.kt)("li",{parentName:"ol"},"avro - This option uses Avro encoding/decoding to convert instances\nof Pydantic model messages to bytes while producing, and while\nconsuming, it constructs an instance of Pydantic model from bytes."),(0,s.kt)("li",{parentName:"ol"},"custom encoder/decoder - If you are not happy with the json or avro\nencoder/decoder options, you can write your own encoder/decoder\nfunctions and use them to encode/decode Pydantic messages.")),(0,s.kt)("h2",{id:"1-json-encoder-and-decoder"},"1. Json encoder and decoder"),(0,s.kt)("p",null,"The default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string."),(0,s.kt)("p",null,"We can use the application from ",(0,s.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let\u2019s modify it to explicitly accept the \u2018json\u2019 encoder/decoder\nparameter:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="json")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, 
msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="json")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,s.kt)("p",null,"In the above code, the ",(0,s.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),' decorator sets up a\nconsumer for the \u201cinput_data" topic, using the \u2018json\u2019 decoder to convert\nthe message payload to an instance of ',(0,s.kt)("inlineCode",{parentName:"p"},"IrisInputData"),". The\n",(0,s.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' decorator sets up a producer for the \u201cpredictions"\ntopic, using the \u2018json\u2019 encoder to convert the instance of\n',(0,s.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," to message payload."),(0,s.kt)("h2",{id:"2-avro-encoder-and-decoder"},"2. Avro encoder and decoder"),(0,s.kt)("h3",{id:"what-is-avro"},"What is Avro?"),(0,s.kt)("p",null,"Avro is a row-oriented remote procedure call and data serialization\nframework developed within Apache\u2019s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n",(0,s.kt)("a",{parentName:"p",href:"https://avro.apache.org/docs/"},"docs"),"."),(0,s.kt)("h3",{id:"installing-fastkafka-with-avro-dependencies"},"Installing FastKafka with Avro dependencies"),(0,s.kt)("p",null,(0,s.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith dependencies for Apache Avro installed is needed to use avro\nencoder/decoder. 
Please install\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith Avro support using the command - ",(0,s.kt)("inlineCode",{parentName:"p"},"pip install fastkafka[avro]")),(0,s.kt)("h3",{id:"defining-avro-schema-using-pydantic-models"},"Defining Avro Schema Using Pydantic Models"),(0,s.kt)("p",null,"By default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models."),(0,s.kt)("p",null,"So, similar to the ",(0,s.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),", the message schema will\nremain as it is."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,s.kt)("p",null,"No need to change anything to support avro. 
You can use existing\nPydantic models as is."),(0,s.kt)("h3",{id:"reusing-existing-avro-schema"},"Reusing existing avro schema"),(0,s.kt)("p",null,"If you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined."),(0,s.kt)("h4",{id:"building-pydantic-models-from-avro-schema-dictionary"},"Building pydantic models from avro schema dictionary"),(0,s.kt)("p",null,"Let\u2019s modify the above example and let\u2019s assume we have schemas already\nfor ",(0,s.kt)("inlineCode",{parentName:"p"},"IrisInputData")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," which will look like below:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'iris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n')),(0,s.kt)("p",null,"We can easily construct pydantic models from avro schema using\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/#fastkafka.encoder.avsc_to_pydantic"},(0,s.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction which is included as part of\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nitself."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.encoder import 
avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n")),(0,s.kt)("p",null,"The above code will convert avro schema to pydantic models and will\nprint pydantic models\u2019 fields. The output of the above is:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-txt"},"{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n")),(0,s.kt)("p",null,"This is exactly same as manually defining the pydantic models ourselves.\nYou don\u2019t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/#fastkafka.encoder.avsc_to_pydantic"},(0,s.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction as demonstrated above."),(0,s.kt)("h4",{id:"building-pydantic-models-from-avsc-file"},"Building pydantic models from ",(0,s.kt)("inlineCode",{parentName:"h4"},".avsc")," file"),(0,s.kt)("p",null,"Not all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary ",(0,s.kt)("inlineCode",{parentName:"p"},".avsc")," files in\nfilesystem. 
Let\u2019s see how to convert those ",(0,s.kt)("inlineCode",{parentName:"p"},".avsc")," files to pydantic\nmodels."),(0,s.kt)("p",null,"Let\u2019s assume our avro files are stored in files called\n",(0,s.kt)("inlineCode",{parentName:"p"},"iris_input_data_schema.avsc")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"iris_prediction_schema.avsc"),". In that\ncase, following code converts the schema to pydantic models:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'import json\nfrom fastkafka.encoder import avsc_to_pydantic\n\n\nwith open("iris_input_data_schema.avsc", "rb") as f:\n iris_input_data_schema = json.load(f)\n \nwith open("iris_prediction_schema.avsc", "rb") as f:\n iris_prediction_schema = json.load(f)\n \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n')),(0,s.kt)("h3",{id:"consumeproduce-avro-messages-with-fastkafka"},"Consume/Produce avro messages with FastKafka"),(0,s.kt)("p",null,(0,s.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nprovides ",(0,s.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@produces")," methods to consume/produces\nmessages to/from a ",(0,s.kt)("inlineCode",{parentName:"p"},"Kafka")," topic. 
This is explained in\n",(0,s.kt)("a",{parentName:"p",href:"/docs#function-decorators"},"tutorial"),"."),(0,s.kt)("p",null,"The ",(0,s.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@produces")," methods accepts a parameter called\n",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode avro messages."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", encoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", decoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,s.kt)("p",null,"In the above example, in ",(0,s.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@produces")," methods, we\nexplicitly instruct FastKafka to ",(0,s.kt)("inlineCode",{parentName:"p"},"decode")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"encode")," messages using\nthe ",(0,s.kt)("inlineCode",{parentName:"p"},"avro")," ",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," instead of the default ",(0,s.kt)("inlineCode",{parentName:"p"},"json"),"\n",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,s.kt)("inlineCode",{parentName:"p"},"encoder"),"."),(0,s.kt)("h3",{id:"assembling-it-all-together"},"Assembling it all together"),(0,s.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,s.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use 
",(0,s.kt)("inlineCode",{parentName:"p"},"avro")," to ",(0,s.kt)("inlineCode",{parentName:"p"},"decode")," and\n",(0,s.kt)("inlineCode",{parentName:"p"},"encode")," messages:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\niris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n 
title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,s.kt)("p",null,"The above code is a sample implementation of using FastKafka to consume\nand produce Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/#fastkafka.encoder.avsc_to_pydantic"},(0,s.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction from the FastKafka library to convert the Avro schema into\nPydantic models, which will be used to decode and encode Avro messages."),(0,s.kt)("p",null,"The\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is then instantiated with the broker details, and two functions\ndecorated with ",(0,s.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' are\ndefined to consume messages from the \u201cinput_data" topic and produce\nmessages to the \u201cpredictions" topic, respectively. 
The functions uses\nthe decoder=\u201cavro" and encoder=\u201cavro" parameters to decode and encode\nthe Avro messages.'),(0,s.kt)("p",null,"In summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline."),(0,s.kt)("h2",{id:"3-custom-encoder-and-decoder"},"3. Custom encoder and decoder"),(0,s.kt)("p",null,"If you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages."),(0,s.kt)("h3",{id:"writing-a-custom-encoder-and-decoder"},"Writing a custom encoder and decoder"),(0,s.kt)("p",null,"In this section, let\u2019s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n",(0,s.kt)("a",{parentName:"p",href:"https://en.wikipedia.org/wiki/ROT13"},"ROT13")," cipher."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},"import codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, 'rot13')\n raw_bytes = obfuscated.encode(\"utf-8\")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n obfuscated = raw_msg.decode(\"utf-8\")\n msg_str = codecs.decode(obfuscated, 'rot13')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n")),(0,s.kt)("p",null,"The above code defines two custom functions for encoding and decoding\nmessages in a Kafka application using the FastKafka library."),(0,s.kt)("p",null,"The encoding function, ",(0,s.kt)("inlineCode",{parentName:"p"},"custom_encoder()"),", takes a message ",(0,s.kt)("inlineCode",{parentName:"p"},"msg")," which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe ",(0,s.kt)("inlineCode",{parentName:"p"},"json()")," method, obfuscates the resulting string using the 
ROT13\nalgorithm from the ",(0,s.kt)("inlineCode",{parentName:"p"},"codecs")," module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding."),(0,s.kt)("p",null,"The decoding function, ",(0,s.kt)("inlineCode",{parentName:"p"},"custom_decoder()"),", takes a raw message ",(0,s.kt)("inlineCode",{parentName:"p"},"raw_msg"),"\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the ",(0,s.kt)("inlineCode",{parentName:"p"},"json.loads()")," method and returns a\nnew instance of the specified ",(0,s.kt)("inlineCode",{parentName:"p"},"cls")," class initialized with the decoded\ndictionary."),(0,s.kt)("p",null,"These functions can be used with FastKafka\u2019s ",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics."),(0,s.kt)("p",null,"Let\u2019s test the above code"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},"i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n")),(0,s.kt)("p",null,"This will result in following output"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-txt"},'b\'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}\'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n')),(0,s.kt)("h3",{id:"assembling-it-all-together-1"},"Assembling it all together"),(0,s.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,s.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use our custom decoder 
and\nencoder functions:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\n\nimport codecs\nimport json\nfrom typing import Any\n\nfrom pydantic.main import ModelMetaclass\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, \'rot13\')\n raw_bytes = obfuscated.encode("utf-8")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n obfuscated = raw_msg.decode("utf-8")\n msg_str = codecs.decode(obfuscated, \'rot13\')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka 
broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,s.kt)("p",null,"This code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system."),(0,s.kt)("p",null,"The custom ",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," function takes a message represented as a\n",(0,s.kt)("inlineCode",{parentName:"p"},"BaseModel")," and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned."),(0,s.kt)("p",null,"The custom ",(0,s.kt)("inlineCode",{parentName:"p"},"decoder")," function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the ",(0,s.kt)("inlineCode",{parentName:"p"},"json"),"\nmodule. 
This dictionary is then converted to a ",(0,s.kt)("inlineCode",{parentName:"p"},"BaseModel")," instance\nusing the cls parameter."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6cafb666.6d7d0081.js b/assets/js/6cafb666.6d7d0081.js new file mode 100644 index 0000000..dc9a3f7 --- /dev/null +++ b/assets/js/6cafb666.6d7d0081.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7883],{3905:(e,t,a)=>{a.d(t,{Zo:()=>l,kt:()=>k});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function c(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):c(c({},t),e)),a},l=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var 
a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,l=i(e,["components","mdxType","originalType","parentName"]),d=s(a),u=r,k=d["".concat(p,".").concat(u)]||d[u]||f[u]||o;return a?n.createElement(k,c(c({ref:t},l),{},{components:a})):n.createElement(k,c({ref:t},l))}));function k(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,c=new Array(o);c[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[d]="string"==typeof e?e:r,c[1]=i;for(var s=2;s<o;s++)c[s]=a[s];return n.createElement.apply(null,c)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},9373:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const o={},c=void 0,i={unversionedId:"api/fastkafka/encoder/avsc_to_pydantic",id:"version-0.7.1/api/fastkafka/encoder/avsc_to_pydantic",title:"avsc_to_pydantic",description:"fastkafka.encoder.avsctopydantic {fastkafka.encoder.avsctopydantic}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/avsc_to_pydantic.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avsc_to_pydantic",permalink:"/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_encoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/avro_encoder"},next:{title:"json_decoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/json_decoder"}},p={},s=[{value:"<code>fastkafka.encoder.avsc_to_pydantic</code>",id:"fastkafka.encoder.avsc_to_pydantic",level:2},{value:"<code>avsc_to_pydantic</code>",id:"avsc_to_pydantic",level:3}],l={toc:s},d="wrapper";function 
f(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},l,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.encoder.avsc_to_pydantic"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avsc_to_pydantic")),(0,r.kt)("h3",{id:"avsc_to_pydantic"},(0,r.kt)("inlineCode",{parentName:"h3"},"avsc_to_pydantic")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass")),(0,r.kt)("p",null,"Generate pydantic model from given Avro Schema"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"schema"),": Avro schema in dictionary format")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Pydantic model class built from given avro schema")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6d9c0b04.bac51284.js b/assets/js/6d9c0b04.bac51284.js new file mode 100644 index 0000000..f9de457 --- /dev/null +++ b/assets/js/6d9c0b04.bac51284.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3042],{3905:(e,t,a)=>{a.d(t,{Zo:()=>l,kt:()=>k});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function c(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function 
i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):c(c({},t),e)),a},l=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,l=i(e,["components","mdxType","originalType","parentName"]),d=s(a),u=r,k=d["".concat(p,".").concat(u)]||d[u]||f[u]||o;return a?n.createElement(k,c(c({ref:t},l),{},{components:a})):n.createElement(k,c({ref:t},l))}));function k(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,c=new Array(o);c[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[d]="string"==typeof e?e:r,c[1]=i;for(var s=2;s<o;s++)c[s]=a[s];return n.createElement.apply(null,c)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},6788:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const o={},c=void 0,i={unversionedId:"api/fastkafka/encoder/avsc_to_pydantic",id:"version-0.7.0/api/fastkafka/encoder/avsc_to_pydantic",title:"avsc_to_pydantic",description:"fastkafka.encoder.avsctopydantic 
{fastkafka.encoder.avsctopydantic}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/avsc_to_pydantic.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avsc_to_pydantic",permalink:"/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_encoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/avro_encoder"},next:{title:"json_decoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/json_decoder"}},p={},s=[{value:"<code>fastkafka.encoder.avsc_to_pydantic</code>",id:"fastkafka.encoder.avsc_to_pydantic",level:2},{value:"<code>avsc_to_pydantic</code>",id:"avsc_to_pydantic",level:3}],l={toc:s},d="wrapper";function f(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},l,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.encoder.avsc_to_pydantic"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avsc_to_pydantic")),(0,r.kt)("h3",{id:"avsc_to_pydantic"},(0,r.kt)("inlineCode",{parentName:"h3"},"avsc_to_pydantic")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass")),(0,r.kt)("p",null,"Generate pydantic model from given Avro Schema"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"schema"),": Avro schema in dictionary format")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Pydantic model class built from given avro schema")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6dbdf8e8.90843a66.js b/assets/js/6dbdf8e8.90843a66.js new file mode 100644 index 0000000..339e5f5 --- /dev/null +++ b/assets/js/6dbdf8e8.90843a66.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1998],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var c=r.createContext({}),l=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=l(e.components);return r.createElement(c.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,c=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),p=l(n),k=a,m=p["".concat(c,".").concat(k)]||p[k]||f[k]||o;return n?r.createElement(m,i(i({ref:t},u),{},{components:n})):r.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,i=new Array(o);i[0]=k;var s={};for(var c in 
t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[p]="string"==typeof e?e:a,i[1]=s;for(var l=2;l<o;l++)i[l]=n[l];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}k.displayName="MDXCreateElement"},3497:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var r=n(7462),a=(n(7294),n(3905));const o={},i=void 0,s={unversionedId:"api/fastkafka/executors/DynamicTaskExecutor",id:"version-0.7.1/api/fastkafka/executors/DynamicTaskExecutor",title:"DynamicTaskExecutor",description:"fastkafka.executors.DynamicTaskExecutor {fastkafka.executors.DynamicTaskExecutor}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/executors/DynamicTaskExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/DynamicTaskExecutor",permalink:"/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_encoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/json_encoder"},next:{title:"SequentialExecutor",permalink:"/docs/0.7.1/api/fastkafka/executors/SequentialExecutor"}},c={},l=[{value:"<code>fastkafka.executors.DynamicTaskExecutor</code>",id:"fastkafka.executors.DynamicTaskExecutor",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>run</code>",id:"run",level:3}],u={toc:l},p="wrapper";function f(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.executors.DynamicTaskExecutor"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.executors.DynamicTaskExecutor")),(0,a.kt)("p",null,"A class that implements a dynamic task executor for processing consumer records."),(0,a.kt)("p",null,"The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\nfor running a tasks in parallel using 
asyncio.Task."),(0,a.kt)("h3",{id:"init"},(0,a.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000, size: int = 100000) -> None")),(0,a.kt)("p",null,"Create an instance of DynamicTaskExecutor"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"throw_exceptions"),": Flag indicating whether exceptions should be thrown ot logged.\nDefaults to False."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"max_buffer_size"),": Maximum buffer size for the memory object stream.\nDefaults to 100_000."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"size"),": Size of the task pool. Defaults to 100_000.")),(0,a.kt)("h3",{id:"run"},(0,a.kt)("inlineCode",{parentName:"h3"},"run")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None")),(0,a.kt)("p",null,"Runs the dynamic task executor."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"is_shutting_down_f"),": Function to check if the executor is shutting down."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"generator"),": Generator function for retrieving consumer records."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"processor"),": Processor function for processing consumer records.")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of 
file diff --git a/assets/js/6e7b1bc6.ac852278.js b/assets/js/6e7b1bc6.ac852278.js new file mode 100644 index 0000000..f9393ca --- /dev/null +++ b/assets/js/6e7b1bc6.ac852278.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4413],{3905:(e,r,t)=>{t.d(r,{Zo:()=>p,kt:()=>k});var a=t(7294);function n(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function o(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);r&&(a=a.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,a)}return t}function c(e){for(var r=1;r<arguments.length;r++){var t=null!=arguments[r]?arguments[r]:{};r%2?o(Object(t),!0).forEach((function(r){n(e,r,t[r])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):o(Object(t)).forEach((function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r))}))}return e}function i(e,r){if(null==e)return{};var t,a,n=function(e,r){if(null==e)return{};var t,a,n={},o=Object.keys(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||(n[t]=e[t]);return n}(e,r);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var s=a.createContext({}),f=function(e){var r=a.useContext(s),t=r;return e&&(t="function"==typeof e?e(r):c(c({},r),e)),t},p=function(e){var r=f(e.components);return a.createElement(s.Provider,{value:r},e.children)},l="mdxType",d={inlineCode:"code",wrapper:function(e){var r=e.children;return a.createElement(a.Fragment,{},r)}},u=a.forwardRef((function(e,r){var t=e.components,n=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),l=f(t),u=n,k=l["".concat(s,".").concat(u)]||l[u]||d[u]||o;return 
t?a.createElement(k,c(c({ref:r},p),{},{components:t})):a.createElement(k,c({ref:r},p))}));function k(e,r){var t=arguments,n=r&&r.mdxType;if("string"==typeof e||n){var o=t.length,c=new Array(o);c[0]=u;var i={};for(var s in r)hasOwnProperty.call(r,s)&&(i[s]=r[s]);i.originalType=e,i[l]="string"==typeof e?e:n,c[1]=i;for(var f=2;f<o;f++)c[f]=t[f];return a.createElement.apply(null,c)}return a.createElement.apply(null,t)}u.displayName="MDXCreateElement"},9827:(e,r,t)=>{t.r(r),t.d(r,{assets:()=>s,contentTitle:()=>c,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>f});var a=t(7462),n=(t(7294),t(3905));const o={},c=void 0,i={unversionedId:"api/fastkafka/encoder/AvroBase",id:"version-0.6.0/api/fastkafka/encoder/AvroBase",title:"AvroBase",description:"fastkafka.encoder.AvroBase {fastkafka.encoder.AvroBase}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/AvroBase.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/AvroBase",permalink:"/docs/0.6.0/api/fastkafka/encoder/AvroBase",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"KafkaEvent",permalink:"/docs/0.6.0/api/fastkafka/KafkaEvent"},next:{title:"avro_decoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/avro_decoder"}},s={},f=[{value:"<code>fastkafka.encoder.AvroBase</code>",id:"fastkafka.encoder.AvroBase",level:2}],p={toc:f},l="wrapper";function d(e){let{components:r,...t}=e;return(0,n.kt)(l,(0,a.Z)({},p,t,{components:r,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.encoder.AvroBase"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.AvroBase")),(0,n.kt)("p",null,"This is base pydantic class that will add some methods"))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/7107eb83.9b621a01.js b/assets/js/7107eb83.9b621a01.js new file mode 100644 index 0000000..cf22dd0 --- /dev/null +++ b/assets/js/7107eb83.9b621a01.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4457],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},k="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),k=l(a),d=r,f=k["".concat(p,".").concat(d)]||k[d]||c[d]||o;return a?n.createElement(f,i(i({ref:t},u),{},{components:a})):n.createElement(f,i({ref:t},u))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=d;var s={};for(var p in 
t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[k]="string"==typeof e?e:r,i[1]=s;for(var l=2;l<o;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}d.displayName="MDXCreateElement"},6252:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>c,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var n=a(7462),r=(a(7294),a(3905));const o={},i="Defining a partition key",s={unversionedId:"guides/Guide_22_Partition_Keys",id:"guides/Guide_22_Partition_Keys",title:"Defining a partition key",description:"Partition keys are used in Apache Kafka to determine which partition a",source:"@site/docs/guides/Guide_22_Partition_Keys.md",sourceDirName:"guides",slug:"/guides/Guide_22_Partition_Keys",permalink:"/docs/next/guides/Guide_22_Partition_Keys",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@produces basics",permalink:"/docs/next/guides/Guide_21_Produces_Basics"},next:{title:"Batch producing",permalink:"/docs/next/guides/Guide_23_Batch_Producing"}},p={},l=[{value:"Return a key from the producing function",id:"return-a-key-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic with the desired key",id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key",level:2}],u={toc:l},k="wrapper";function c(e){let{components:t,...a}=e;return(0,r.kt)(k,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"defining-a-partition-key"},"Defining a partition key"),(0,r.kt)("p",null,"Partition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. 
Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance."),(0,r.kt)("p",null,"You can define your partition keys when using the ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator,\nthis guide will demonstrate to you this feature."),(0,r.kt)("h2",{id:"return-a-key-from-the-producing-function"},"Return a key from the producing function"),(0,r.kt)("p",null,"To define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into\n",(0,r.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/KafkaEvent#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass and set the key value. Check the example below:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n')),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class with\nthe key defined as ",(0,r.kt)("em",{parentName:"p"},"my_key"),". So, we wrap the message and key into a\nKafkaEvent class and return it as such."),(0,r.kt)("p",null,"While generating the documentation, the\n",(0,r.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/KafkaEvent#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass will be unwrapped and the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class will be documented in\nthe definition of message type, same way if you didn\u2019t use the key."),(0,r.kt)("p",null,'!!! 
info "Which key to choose?"'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n")),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("strong",{parentName:"p"},"@producer basics")," guide to return\nthe ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," with our key. The final app will look like this (make\nsure you replace the ",(0,r.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key"},"Check if the message was sent to the Kafka topic with the desired key"),(0,r.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic with the defined key. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the ",(0,r.kt)("em",{parentName:"p"},'my_key {\u201cmsg": \u201cHello world!"}')," messages in your\ntopic appearing, the ",(0,r.kt)("em",{parentName:"p"},"my_key")," part of the message is the key that we\ndefined in our producing function."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/7245ce96.2ad33dfd.js b/assets/js/7245ce96.2ad33dfd.js new file mode 100644 index 0000000..2a304cd --- /dev/null +++ b/assets/js/7245ce96.2ad33dfd.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[92],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>k});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){i(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function r(e,t){if(null==e)return{};var n,a,i=function(e,t){if(null==e)return{};var n,a,i={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var 
s=a.createContext({}),m=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=m(e.components);return a.createElement(s.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,o=e.originalType,s=e.parentName,p=r(e,["components","mdxType","originalType","parentName"]),d=m(n),u=i,k=d["".concat(s,".").concat(u)]||d[u]||c[u]||o;return n?a.createElement(k,l(l({ref:t},p),{},{components:n})):a.createElement(k,l({ref:t},p))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,l=new Array(o);l[0]=u;var r={};for(var s in t)hasOwnProperty.call(t,s)&&(r[s]=t[s]);r.originalType=e,r[d]="string"==typeof e?e:i,l[1]=r;for(var m=2;m<o;m++)l[m]=n[m];return a.createElement.apply(null,l)}return a.createElement.apply(null,n)}u.displayName="MDXCreateElement"},3004:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>c,frontMatter:()=>o,metadata:()=>r,toc:()=>m});var a=n(7462),i=(n(7294),n(3905));const o={},l=void 0,r={unversionedId:"api/fastkafka/FastKafka",id:"version-0.7.1/api/fastkafka/FastKafka",title:"FastKafka",description:"fastkafka.FastKafka 
{fastkafka.FastKafka}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/FastKafka.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/",permalink:"/docs/0.7.1/api/fastkafka/",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"EventMetadata",permalink:"/docs/0.7.1/api/fastkafka/EventMetadata"},next:{title:"KafkaEvent",permalink:"/docs/0.7.1/api/fastkafka/KafkaEvent"}},s={},m=[{value:"<code>fastkafka.FastKafka</code>",id:"fastkafka.FastKafka",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>benchmark</code>",id:"benchmark",level:3},{value:"<code>consumes</code>",id:"consumes",level:3},{value:"<code>create_docs</code>",id:"create_docs",level:3},{value:"<code>create_mocks</code>",id:"create_mocks",level:3},{value:"<code>fastapi_lifespan</code>",id:"fastapi_lifespan",level:3},{value:"<code>get_topics</code>",id:"get_topics",level:3},{value:"<code>produces</code>",id:"produces",level:3},{value:"<code>run_in_background</code>",id:"run_in_background",level:3},{value:"<code>set_kafka_broker</code>",id:"set_kafka_broker",level:3}],p={toc:m},d="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(d,(0,a.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"h2"},"fastkafka.FastKafka")),(0,i.kt)("h3",{id:"init"},(0,i.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Optional[Dict[str, Any]] = None, root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 
0x7fcedfc68f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fcedec6c850>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None")),(0,i.kt)("p",null,"Creates FastKafka application"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"title"),": optional title for the documentation. If None,\nthe title will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": optional description for the documentation. If\nNone, the description will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"version"),": optional version for the documentation. 
If None,\nthe version will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"contact"),": optional contact for the documentation. If None, the\ncontact will be set to placeholder values:\nname='Author' url=HttpUrl(' ",(0,i.kt)("a",{parentName:"li",href:"https://www.google.com"},"https://www.google.com")," ', ) email='",(0,i.kt)("a",{parentName:"li",href:"mailto:noreply@gmail.com"},"noreply@gmail.com"),"'"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),': dictionary describing kafka brokers used for setting\nthe bootstrap server when running the applicationa and for\ngenerating documentation. Defaults to\n{\n"localhost": {\n"url": "localhost",\n"description": "local kafka broker",\n"port": "9092",\n}\n}'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"root_path"),": path to where documentation will be created"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"lifespan"),": asynccontextmanager that is used for setting lifespan hooks.\n",(0,i.kt)("strong",{parentName:"li"},"aenter")," is called before app start and ",(0,i.kt)("strong",{parentName:"li"},"aexit")," after app stop.\nThe lifespan is called whe application is started as async context\nmanager, e.g.:",(0,i.kt)("inlineCode",{parentName:"li"},"async with kafka_app...")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. 
If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. 
The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. 
Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. 
Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. 
Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. 
If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. 
(See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. 
NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". 
Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. 
Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. 
The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("h3",{id:"benchmark"},(0,i.kt)("inlineCode",{parentName:"h3"},"benchmark")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]")),(0,i.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"interval"),": Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sliding_window_size"),": The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated")),(0,i.kt)("h3",{id:"consumes"},(0,i.kt)("inlineCode",{parentName:"h3"},"consumes")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.1', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], 
typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"decoder"),": Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. 
It also accepts custom decoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"executor"),': Type of executor to choose for consuming tasks. Avaliable options\nare "SequentialExecutor" and "DynamicTaskExecutor". The default option is\n"SequentialExecutor" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n"DynamicTaskExecutor" which will wrap the consuming functions into tasks\nand run them in on asyncio loop in background. This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: "on_". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"brokers"),": Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": Optional description of the consuming function async docs.\nIf not provided, consuming function ",(0,i.kt)("strong",{parentName:"li"},"doc")," attr will be used."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. 
If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string (or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings) that the consumer should contact to bootstrap\ninitial cluster metadata.")),(0,i.kt)("p",null,"This does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ",(0,i.kt)("inlineCode",{parentName:"p"},"localhost:9092"),"."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~.consumer.group_coordinator.GroupCoordinator"),"\nfor logging with respect to consumer group administration. Default:\n",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-{version}")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. 
The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Client request timeout in milliseconds.\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. 
Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. 
It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more information see\n:ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),". Default: None."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying ",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. 
Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Valid values are:\n",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("h3",{id:"create_docs"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_docs")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_docs(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Create the asyncapi documentation based on the configured consumers and producers."),(0,i.kt)("p",null,"This function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers."),(0,i.kt)("p",null,"Note:\nThe asyncapi documentation is saved to the location specified by the ",(0,i.kt)("inlineCode",{parentName:"p"},"_asyncapi_path"),"\nattribute of the FastKafka instance."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"None")),(0,i.kt)("h3",{id:"create_mocks"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_mocks")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_mocks(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,i.kt)("h3",{id:"fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"h3"},"fastapi_lifespan")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]")),(0,i.kt)("p",null,"Method for managing the lifespan of a FastAPI application with a specific Kafka 
broker."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_broker_name"),": The name of the Kafka broker to start FastKafka")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"Lifespan function to use for initializing FastAPI")),(0,i.kt)("h3",{id:"get_topics"},(0,i.kt)("inlineCode",{parentName:"h3"},"get_topics")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]")),(0,i.kt)("p",null,"Get all topics for both producing and consuming."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A set of topics for both producing and consuming.")),(0,i.kt)("h3",{id:"produces"},(0,i.kt)("inlineCode",{parentName:"h3"},"produces")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fcedfc68f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fcedec6c850>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, 
sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when delivery report for a produced message is received"),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encoder"),": Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. 
It also accepts custom encoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: "to_". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"brokers"),": Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": Optional description of the producing function async docs.\nIf not provided, producing function ",(0,i.kt)("strong",{parentName:"li"},"doc")," attr will be used."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. 
No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). 
Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. 
(See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"))),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": when needed")),(0,i.kt)("h3",{id:"run_in_background"},(0,i.kt)("inlineCode",{parentName:"h3"},"run_in_background")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]")),(0,i.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,i.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,i.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is triggered."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A decorator function that takes a background task as an input and stores it to be run in the backround.")),(0,i.kt)("h3",{id:"set_kafka_broker"},(0,i.kt)("inlineCode",{parentName:"h3"},"set_kafka_broker")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def set_kafka_broker(self, kafka_broker_name: str) -> None")),(0,i.kt)("p",null,"Sets the Kafka broker to start FastKafka with"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_broker_name"),": The name of the Kafka broker to start 
FastKafka")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": If the provided kafka_broker_name is not found in dictionary of kafka_brokers")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/74e1ba0d.3a0419d8.js b/assets/js/74e1ba0d.3a0419d8.js new file mode 100644 index 0000000..351d6ce --- /dev/null +++ b/assets/js/74e1ba0d.3a0419d8.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5845],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>m});var t=n(7294);function i(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function o(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function r(e){for(var a=1;a<arguments.length;a++){var n=null!=arguments[a]?arguments[a]:{};a%2?o(Object(n),!0).forEach((function(a){i(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function s(e,a){if(null==e)return{};var n,t,i=function(e,a){if(null==e)return{};var n,t,i={},o=Object.keys(e);for(t=0;t<o.length;t++)n=o[t],a.indexOf(n)>=0||(i[n]=e[n]);return i}(e,a);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(t=0;t<o.length;t++)n=o[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=t.createContext({}),l=function(e){var a=t.useContext(p),n=a;return e&&(n="function"==typeof e?e(a):r(r({},a),e)),n},c=function(e){var 
a=l(e.components);return t.createElement(p.Provider,{value:a},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},u=t.forwardRef((function(e,a){var n=e.components,i=e.mdxType,o=e.originalType,p=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=l(n),u=i,m=d["".concat(p,".").concat(u)]||d[u]||k[u]||o;return n?t.createElement(m,r(r({ref:a},c),{},{components:n})):t.createElement(m,r({ref:a},c))}));function m(e,a){var n=arguments,i=a&&a.mdxType;if("string"==typeof e||i){var o=n.length,r=new Array(o);r[0]=u;var s={};for(var p in a)hasOwnProperty.call(a,p)&&(s[p]=a[p]);s.originalType=e,s[d]="string"==typeof e?e:i,r[1]=s;for(var l=2;l<o;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}u.displayName="MDXCreateElement"},9017:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>k,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var t=n(7462),i=(n(7294),n(3905));const o={},r="Benchmarking FastKafka app",s={unversionedId:"guides/Guide_06_Benchmarking_FastKafka",id:"version-0.7.0/guides/Guide_06_Benchmarking_FastKafka",title:"Benchmarking FastKafka app",description:"Prerequisites",source:"@site/versioned_docs/version-0.7.0/guides/Guide_06_Benchmarking_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_06_Benchmarking_FastKafka",permalink:"/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using FastAPI to Run FastKafka Application",permalink:"/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application"},next:{title:"EventMetadata",permalink:"/docs/0.7.0/api/fastkafka/EventMetadata"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Starting Kafka",id:"starting-kafka",level:3},{value:"Installing Java and 
Kafka",id:"installing-java-and-kafka",level:4},{value:"Creating configuration for Zookeeper and Kafka",id:"creating-configuration-for-zookeeper-and-kafka",level:4},{value:"Starting Zookeeper and Kafka",id:"starting-zookeeper-and-kafka",level:4},{value:"Creating topics in Kafka",id:"creating-topics-in-kafka",level:4},{value:"Populating topics with dummy data",id:"populating-topics-with-dummy-data",level:4},{value:"Benchmarking FastKafka",id:"benchmarking-fastkafka",level:3}],c={toc:l},d="wrapper";function k(e){let{components:a,...n}=e;return(0,i.kt)(d,(0,t.Z)({},c,n,{components:a,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"benchmarking-fastkafka-app"},"Benchmarking FastKafka app"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"To benchmark a ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," project, you will need the following:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library built with ",(0,i.kt)("inlineCode",{parentName:"li"},"FastKafka"),"."),(0,i.kt)("li",{parentName:"ol"},"A running ",(0,i.kt)("inlineCode",{parentName:"li"},"Kafka")," instance to benchmark the FastKafka application\nagainst.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka"),"-based application and write it to the\n",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the ",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = 
LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," has a decorator for benchmarking which is appropriately\ncalled as ",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark"),". 
Let\u2019s edit our ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and add the\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," decorator to the consumes method."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\n@kafka_app.benchmark(interval=1, 
sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"Here we are conducting a benchmark of a function that consumes data from\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic with an interval of 1 second and a sliding window\nsize of 5."),(0,i.kt)("p",null,"This ",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," method uses the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," parameter to calculate the\nresults over a specific time period, and the ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size"),"\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation."),(0,i.kt)("p",null,"This benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement."),(0,i.kt)("h3",{id:"starting-kafka"},"Starting Kafka"),(0,i.kt)("p",null,"If you already have a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," running somewhere, then you can skip this\nstep."),(0,i.kt)("p",null,"Please keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself."),(0,i.kt)("h4",{id:"installing-java-and-kafka"},"Installing Java and Kafka"),(0,i.kt)("p",null,"We need a working ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),"instance to benchmark our ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," app, and\nto run ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," 
we need ",(0,i.kt)("inlineCode",{parentName:"p"},"Java"),". Thankfully, ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," comes with a CLI\nto install both ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," on our machine."),(0,i.kt)("p",null,"So, let\u2019s install ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," by executing the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka testing install_deps\n")),(0,i.kt)("p",null,"The above command will extract ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),' scripts at the location\n\u201c\\$HOME/.local/kafka_2.13-3.3.2" on your machine.'),(0,i.kt)("h4",{id:"creating-configuration-for-zookeeper-and-kafka"},"Creating configuration for Zookeeper and Kafka"),(0,i.kt)("p",null,"Now we need to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," separately, and to start\nthem we need ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," files."),(0,i.kt)("p",null,"Let\u2019s create a folder inside the folder where ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," scripts were\nextracted and change directory into it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n")),(0,i.kt)("p",null,"Let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n")),(0,i.kt)("p",null,"Similarly, let\u2019s create a file called 
",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"broker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n")),(0,i.kt)("h4",{id:"starting-zookeeper-and-kafka"},"Starting Zookeeper and Kafka"),(0,i.kt)("p",null,"We need two different terminals to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," in one and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," in\nanother. 
Let\u2019s open a new terminal and run the following commands to\nstart ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n")),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," is up and running, open a new terminal and execute the\nfollwing commands to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n")),(0,i.kt)("p",null,"Now we have both ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," up and running."),(0,i.kt)("h4",{id:"creating-topics-in-kafka"},"Creating topics in Kafka"),(0,i.kt)("p",null,"In a new terminal, please execute the following command to create\nnecessary topics in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n")),(0,i.kt)("h4",{id:"populating-topics-with-dummy-data"},"Populating topics with dummy data"),(0,i.kt)("p",null,"To benchmark our ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," app, we need some data in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," topics."),(0,i.kt)("p",null,"In the same terminal, let\u2019s create some dummy 
data:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},'yes \'{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}\' | head -n 1000000 > /tmp/test_data\n')),(0,i.kt)("p",null,"This command will create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"test_data")," in the ",(0,i.kt)("inlineCode",{parentName:"p"},"tmp")," folder\nwith one million rows of text. This will act as dummy data to populate\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic."),(0,i.kt)("p",null,"Let\u2019s populate the created topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," with the dummy data which\nwe created above:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n")),(0,i.kt)("p",null,"Now our topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," has one million records/messages in it. If\nyou want more messages in topic, you can simply execute the above\ncommand again and again."),(0,i.kt)("h3",{id:"benchmarking-fastkafka"},"Benchmarking FastKafka"),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," are ready, benchmarking ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," app is\nas simple as running the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n")),(0,i.kt)("p",null,"This command will start the ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," app and begin consuming messages\nfrom ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),", which we spun up earlier. 
Additionally, the same command\nwill output all of the benchmark throughputs based on the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size")," values."),(0,i.kt)("p",null,"The output for the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group 
ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 
10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n")),(0,i.kt)("p",null,"Based on the output, when using 1 worker, our ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," app achieved a\n",(0,i.kt)("inlineCode",{parentName:"p"},"throughput")," of 93k messages per second and an ",(0,i.kt)("inlineCode",{parentName:"p"},"average throughput")," of\n93k messages per second."))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/75af10bd.f657f121.js b/assets/js/75af10bd.f657f121.js new file mode 100644 index 0000000..f55ab7e --- /dev/null +++ b/assets/js/75af10bd.f657f121.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1783],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},c=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=p(a),k=o,f=d["".concat(i,".").concat(k)]||d[k]||u[k]||r;return a?n.createElement(f,s(s({ref:t},c),{},{components:a})):n.createElement(f,s({ref:t},c))}));function f(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,s=new Array(r);s[0]=k;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[d]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},6940:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var n=a(7462),o=(a(7294),a(3905));const r={},s="@produces basics",l={unversionedId:"guides/Guide_21_Produces_Basics",id:"version-0.8.0/guides/Guide_21_Produces_Basics",title:"@produces basics",description:"You 
can use @produces decorator to produce messages to Kafka topics.",source:"@site/versioned_docs/version-0.8.0/guides/Guide_21_Produces_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_21_Produces_Basics",permalink:"/docs/guides/Guide_21_Produces_Basics",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Batch consuming",permalink:"/docs/guides/Guide_12_Batch_Consuming"},next:{title:"Defining a partition key",permalink:"/docs/guides/Guide_22_Partition_Keys"}},i={},p=[{value:"Import <code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a producer function and decorate it with <code>@produces</code>",id:"create-a-producer-function-and-decorate-it-with-produces",level:2},{value:"Instruct the app to start sending HelloWorld messages",id:"instruct-the-app-to-start-sending-helloworld-messages",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic",id:"check-if-the-message-was-sent-to-the-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"produces-basics"},"@produces basics"),(0,o.kt)("p",null,"You can use ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator to produce messages to Kafka topics."),(0,o.kt)("p",null,"In this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic."),(0,o.kt)("h2",{id:"import-fastkafka"},"Import 
",(0,o.kt)("a",{parentName:"h2",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,o.kt)("p",null,"To use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator, frist we need to import the base\nFastKafka app to create our application."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,o.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,o.kt)("p",null,"Next, you need to define the structure of the messages you want to send\nto the topic using ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". For the guide\nwe\u2019ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded."),(0,o.kt)("p",null,"Let\u2019s import ",(0,o.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,o.kt)("inlineCode",{parentName:"p"},"msg")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,o.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,o.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,o.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap 
server"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,o.kt)("h2",{id:"create-a-producer-function-and-decorate-it-with-produces"},"Create a producer function and decorate it with ",(0,o.kt)("inlineCode",{parentName:"h2"},"@produces")),(0,o.kt)("p",null,"Let\u2019s create a producer function that will produce ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nto ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n")),(0,o.kt)("p",null,"Now you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic."),(0,o.kt)("p",null,"By default, the topic is determined from your function name, the \u201cto","_",'"\nprefix is stripped and what is left over is used as a topic name. 
I this\ncase, that is ',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("h2",{id:"instruct-the-app-to-start-sending-helloworld-messages"},"Instruct the app to start sending HelloWorld messages"),(0,o.kt)("p",null,"Let\u2019s use ",(0,o.kt)("inlineCode",{parentName:"p"},"@run_in_background")," decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"Your app code should look like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"run-the-app"},"Run the app"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'script_file = "producer_example.py"\ncmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"\nmd(\n f"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```"\n)\n')),(0,o.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n")),(0,o.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n")),(0,o.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic"},"Check if the message was sent to the Kafka topic"),(0,o.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic. 
In your terminal run:'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh -topic=hello_world --from-beginning -bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n")),(0,o.kt)("p",null,'You should see the {\u201cmsg": \u201cHello world!"} messages in your topic.'),(0,o.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,o.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are sending\nthe message to, this is because the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default \u201cto","_",'" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("p",null,'!!! warn "New topics"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Kafka producers and application startup will fail if the topics you are producing to don't yet exist. 
Before running the app, make sure that the topics are created.\n")),(0,o.kt)("p",null,"You can choose your custom prefix by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nproduces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(prefix="send_to_")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in produces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(topic="my_special_topic")\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("h2",{id:"message-data"},"Message data"),(0,o.kt)("p",null,"The return value from your function will be translated JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app."),(0,o.kt)("p",null,"In this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. 
In the from of:\n",(0,o.kt)("inlineCode",{parentName:"p"},'b\'{"msg":"Hello world!"}\'')))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/7ae5d564.6ea40e89.js b/assets/js/7ae5d564.6ea40e89.js new file mode 100644 index 0000000..37d1798 --- /dev/null +++ b/assets/js/7ae5d564.6ea40e89.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5012],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>u});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function s(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function i(e){for(var a=1;a<arguments.length;a++){var n=null!=arguments[a]?arguments[a]:{};a%2?s(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):s(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function r(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var n,t,o={},s=Object.keys(e);for(t=0;t<s.length;t++)n=s[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(t=0;t<s.length;t++)n=s[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=t.createContext({}),l=function(e){var a=t.useContext(p),n=a;return e&&(n="function"==typeof e?e(a):i(i({},a),e)),n},c=function(e){var a=l(e.components);return t.createElement(p.Provider,{value:a},e.children)},k="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},f=t.forwardRef((function(e,a){var 
n=e.components,o=e.mdxType,s=e.originalType,p=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),k=l(n),f=o,u=k["".concat(p,".").concat(f)]||k[f]||d[f]||s;return n?t.createElement(u,i(i({ref:a},c),{},{components:n})):t.createElement(u,i({ref:a},c))}));function u(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=n.length,i=new Array(s);i[0]=f;var r={};for(var p in a)hasOwnProperty.call(a,p)&&(r[p]=a[p]);r.originalType=e,r[k]="string"==typeof e?e:o,i[1]=r;for(var l=2;l<s;l++)i[l]=n[l];return t.createElement.apply(null,i)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},399:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>p,contentTitle:()=>i,default:()=>d,frontMatter:()=>s,metadata:()=>r,toc:()=>l});var t=n(7462),o=(n(7294),n(3905));const s={},i="FastKafka tutorial",r={unversionedId:"guides/Guide_00_FastKafka_Demo",id:"version-0.7.0/guides/Guide_00_FastKafka_Demo",title:"FastKafka tutorial",description:"FastKafka is a powerful and easy-to-use",source:"@site/versioned_docs/version-0.7.0/guides/Guide_00_FastKafka_Demo.md",sourceDirName:"guides",slug:"/guides/Guide_00_FastKafka_Demo",permalink:"/docs/0.7.0/guides/Guide_00_FastKafka_Demo",draft:!1,tags:[],version:"0.7.0",frontMatter:{}},p={},l=[{value:"Install",id:"install",level:2},{value:"Running in Colab",id:"running-in-colab",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2}],c={toc:l},k="wrapper";function 
d(e){let{components:a,...n}=e;return(0,o.kt)(k,(0,t.Z)({},c,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka-tutorial"},"FastKafka tutorial"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"try:\n import fastkafka\nexcept:\n ! 
pip install fastkafka\n")),(0,o.kt)("h2",{id:"running-in-colab"},"Running in Colab"),(0,o.kt)("p",null,"You can start this interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open In Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"Here is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic."),(0,o.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,o.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. The following call downloads\nthe dataset and trains the model."),(0,o.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/types/#constrained-types"},(0,o.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,o.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,o.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server."),(0,o.kt)("p",null,"Next, an object of the ",(0,o.kt)("inlineCode",{parentName:"p"},"FastKafka")," class is initialized with the minimum\nset of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,o.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,o.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the ",(0,o.kt)("inlineCode",{parentName:"p"},"Tester")," instances which internally\nstarts Kafka broker and zookeeper."),(0,o.kt)("p",null,"Before running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka testing install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop 
shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a Iris classification model and encapulated it into our\nfastkafka application. 
The app will consume the IrisInputData from the\n",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,o.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purpuoses")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent IrisInputData message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to IrisInputData message"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin ",(0,o.kt)("inlineCode",{parentName:"p"},"faskafka run")," CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal 
length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("p",null,"To run the service, you will need a running Kafka broker on localhost as\nspecified in the ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")," parameter above. We can start the Kafka\nbroker locally using the ",(0,o.kt)("inlineCode",{parentName:"p"},"ApacheKafkaBroker"),". 
Notice that the same\nhappens automatically in the ",(0,o.kt)("inlineCode",{parentName:"p"},"Tester")," as shown above."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n'127.0.0.1:9092'\n")),(0,o.kt)("p",null,"Then, we start the FastKafka service by running the following command in\nthe folder where the ",(0,o.kt)("inlineCode",{parentName:"p"},"application.py")," file is located:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n")),(0,o.kt)("p",null,"In the above command, we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--num-workers")," option to specify how many\nworkers to launch and we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--kafka-broker")," option to specify which\nkafka broker configuration to use from earlier specified 
",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n^C\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
stopped.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n")),(0,o.kt)("p",null,"You need to interupt running of the cell above by selecting\n",(0,o.kt)("inlineCode",{parentName:"p"},"Runtime->Interupt execution")," on the toolbar above."),(0,o.kt)("p",null,"Finally, we can stop the local Kafka Broker:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"When running in Colab, we need to update Node.js first:"),(0,o.kt)("p",null,"We need to install all dependancies for the generator using the\nfollowing command line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation 
programatically you just need to call the\nfolloving command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n")),(0,o.kt)("p",null,". This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\ndrwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n^C\nInterupting serving of documentation and cleaning up...\n")),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n 
bootstrap_servers="localhost:9092",\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/7b4381d3.483f4f09.js b/assets/js/7b4381d3.483f4f09.js new file mode 100644 index 0000000..65f1993 --- /dev/null +++ b/assets/js/7b4381d3.483f4f09.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3696],{3905:(e,t,a)=>{a.d(t,{Zo:()=>f,kt:()=>d});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function p(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),l=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},f=function(e){var t=l(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,s=e.parentName,f=p(e,["components","mdxType","originalType","parentName"]),u=l(a),k=o,d=u["".concat(s,".").concat(k)]||u[k]||c[k]||r;return a?n.createElement(d,i(i({ref:t},f),{},{components:a})):n.createElement(d,i({ref:t},f))}));function d(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=k;var p={};for(var s in t)hasOwnProperty.call(t,s)&&(p[s]=t[s]);p.originalType=e,p[u]="string"==typeof e?e:o,i[1]=p;for(var l=2;l<r;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},3658:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>p,toc:()=>l});var n=a(7462),o=(a(7294),a(3905));const r={},i="Deploy FastKafka docs to GitHub Pages",p={unversionedId:"guides/Guide_04_Github_Actions_Workflow",id:"guides/Guide_04_Github_Actions_Workflow",title:"Deploy 
FastKafka docs to GitHub Pages",description:"Getting started",source:"@site/docs/guides/Guide_04_Github_Actions_Workflow.md",sourceDirName:"guides",slug:"/guides/Guide_04_Github_Actions_Workflow",permalink:"/docs/next/guides/Guide_04_Github_Actions_Workflow",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using Redpanda to test FastKafka",permalink:"/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka"},next:{title:"Deploying FastKafka using Docker",permalink:"/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka"}},s={},l=[{value:"Getting started",id:"getting-started",level:2},{value:"Options",id:"options",level:2},{value:"Set app location",id:"set-app-location",level:3},{value:"Example Repository",id:"example-repository",level:2}],f={toc:l},u="wrapper";function c(e){let{components:t,...a}=e;return(0,o.kt)(u,(0,n.Z)({},f,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"deploy-fastkafka-docs-to-github-pages"},"Deploy FastKafka docs to GitHub Pages"),(0,o.kt)("h2",{id:"getting-started"},"Getting started"),(0,o.kt)("p",null,"Add your workflow file ",(0,o.kt)("inlineCode",{parentName:"p"},".github/workflows/fastkafka_docs_deploy.yml")," and\npush it to your remote default branch."),(0,o.kt)("p",null,"Here is an example workflow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'name: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n push:\n branches: [ "main", "master" ]\n workflow_dispatch:\n\njobs:\n deploy:\n runs-on: ubuntu-latest\n permissions:\n contents: write\n steps:\n - uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("h2",{id:"options"},"Options"),(0,o.kt)("h3",{id:"set-app-location"},"Set app location"),(0,o.kt)("p",null,"Input in the form of ",(0,o.kt)("inlineCode",{parentName:"p"},"path:app"),", where ",(0,o.kt)("inlineCode",{parentName:"p"},"path")," is the path to a Python\nfile 
and ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," is an object of type\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'- name: Deploy\n uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("p",null,"In the above example,\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is named as ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_app")," and it is available in the ",(0,o.kt)("inlineCode",{parentName:"p"},"application"),"\nsubmodule of the ",(0,o.kt)("inlineCode",{parentName:"p"},"test_fastkafka")," module."),(0,o.kt)("h2",{id:"example-repository"},"Example Repository"),(0,o.kt)("p",null,"A\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\nlibrary that uses the above-mentioned workfow actions to publish\nFastKafka docs to ",(0,o.kt)("inlineCode",{parentName:"p"},"Github Pages")," can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml"},"here"),"."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/7b589963.32aefe0f.js b/assets/js/7b589963.32aefe0f.js new file mode 100644 index 0000000..04abba5 --- /dev/null +++ b/assets/js/7b589963.32aefe0f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5628],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var 
n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},d=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(a),u=r,f=c["".concat(s,".").concat(u)]||c[u]||k[u]||o;return a?n.createElement(f,i(i({ref:t},d),{},{components:a})):n.createElement(f,i({ref:t},d))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:r,i[1]=l;for(var p=2;p<o;p++)i[p]=a[p];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},1295:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>k,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var 
n=a(7462),r=(a(7294),a(3905));const o={},i=void 0,l={unversionedId:"api/fastkafka/testing/LocalRedpandaBroker",id:"version-0.6.0/api/fastkafka/testing/LocalRedpandaBroker",title:"LocalRedpandaBroker",description:"fastkafka.testing.LocalRedpandaBroker {fastkafka.testing.LocalRedpandaBroker}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/testing/LocalRedpandaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/LocalRedpandaBroker",permalink:"/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"ApacheKafkaBroker",permalink:"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker"},next:{title:"Tester",permalink:"/docs/0.6.0/api/fastkafka/testing/Tester"}},s={},p=[{value:"<code>fastkafka.testing.LocalRedpandaBroker</code>",id:"fastkafka.testing.LocalRedpandaBroker",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>get_service_config_string</code>",id:"get_service_config_string",level:3},{value:"<code>start</code>",id:"start",level:3},{value:"<code>stop</code>",id:"stop",level:3}],d={toc:p},c="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.testing.LocalRedpandaBroker"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.LocalRedpandaBroker")),(0,r.kt)("p",null,"LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing."),(0,r.kt)("h3",{id:"init"},(0,r.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None")),(0,r.kt)("p",null,"Initialises the 
LocalRedpandaBroker object"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"tag"),": Tag of Redpanda image to use to start container"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"seastar_core"),": Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"memory"),": The amount of memory to make available to Redpanda"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"mode"),": Mode to use to load configuration properties in container"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"default_log_level"),": Log levels to use for Redpanda")),(0,r.kt)("h3",{id:"get_service_config_string"},(0,r.kt)("inlineCode",{parentName:"h3"},"get_service_config_string")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str")),(0,r.kt)("p",null,"Generates a configuration for a service"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"service"),': "redpanda", defines which service to get config string for')),(0,r.kt)("h3",{id:"start"},(0,r.kt)("inlineCode",{parentName:"h3"},"start")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def start(self: fastkafka.testing.LocalRedpandaBroker) -> str")),(0,r.kt)("p",null,"Starts a local redpanda broker instance 
synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Redpanda broker bootstrap server address in string format: add:port")),(0,r.kt)("h3",{id:"stop"},(0,r.kt)("inlineCode",{parentName:"h3"},"stop")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None")),(0,r.kt)("p",null,"Stops a local redpanda broker instance synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"None")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/80f42d74.392d78fb.js b/assets/js/80f42d74.392d78fb.js new file mode 100644 index 0000000..f4ef42b --- /dev/null +++ b/assets/js/80f42d74.392d78fb.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6527],{3905:(e,t,a)=>{a.d(t,{Zo:()=>p,kt:()=>m});var r=a(7294);function n(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,r)}return a}function l(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){n(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,r,n=function(e,t){if(null==e)return{};var a,r,n={},o=Object.keys(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||(n[a]=e[a]);return n}(e,t);if(Object.getOwnPropertySymbols){var 
o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(n[a]=e[a])}return n}var c=r.createContext({}),d=function(e){var t=r.useContext(c),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},p=function(e){var t=d(e.components);return r.createElement(c.Provider,{value:t},e.children)},s="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var a=e.components,n=e.mdxType,o=e.originalType,c=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),s=d(a),k=n,m=s["".concat(c,".").concat(k)]||s[k]||u[k]||o;return a?r.createElement(m,l(l({ref:t},p),{},{components:a})):r.createElement(m,l({ref:t},p))}));function m(e,t){var a=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var o=a.length,l=new Array(o);l[0]=k;var i={};for(var c in t)hasOwnProperty.call(t,c)&&(i[c]=t[c]);i.originalType=e,i[s]="string"==typeof e?e:n,l[1]=i;for(var d=2;d<o;d++)l[d]=a[d];return r.createElement.apply(null,l)}return r.createElement.apply(null,a)}k.displayName="MDXCreateElement"},4725:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>c,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>d});var r=a(7462),n=(a(7294),a(3905));const o={},l=void 0,i={unversionedId:"api/fastkafka/encoder/avro_decoder",id:"version-0.8.0/api/fastkafka/encoder/avro_decoder",title:"avro_decoder",description:"avrodecoder 
{fastkafka.encoder.avrodecoder}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/avro_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_decoder",permalink:"/docs/api/fastkafka/encoder/avro_decoder",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"AvroBase",permalink:"/docs/api/fastkafka/encoder/AvroBase"},next:{title:"avro_encoder",permalink:"/docs/api/fastkafka/encoder/avro_encoder"}},c={},d=[{value:"avro_decoder",id:"fastkafka.encoder.avro_decoder",level:3}],p={toc:d},s="wrapper";function u(e){let{components:t,...a}=e;return(0,n.kt)(s,(0,r.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("h3",{id:"fastkafka.encoder.avro_decoder"},"avro_decoder"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L263-L279",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"avro_decoder(\n raw_msg, cls\n)\n")),(0,n.kt)("p",null,"Decoder to decode avro encoded messages to pydantic model instance"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Name"),(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"),(0,n.kt)("th",{parentName:"tr",align:null},"Default"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"raw_msg")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"bytes")),(0,n.kt)("td",{parentName:"tr",align:null},"Avro encoded bytes message received from Kafka 
topic"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"cls")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"Type[pydantic.main.BaseModel]")),(0,n.kt)("td",{parentName:"tr",align:null},"Pydantic class; This pydantic class will be used to construct instance of same class"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))))),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"Any")),(0,n.kt)("td",{parentName:"tr",align:null},"An instance of given pydantic class")))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/81b6783d.a996001b.js b/assets/js/81b6783d.a996001b.js new file mode 100644 index 0000000..c331dcb --- /dev/null +++ b/assets/js/81b6783d.a996001b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8888],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>m});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},d=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,i=e.mdxType,o=e.originalType,p=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=l(a),u=i,m=c["".concat(p,".").concat(u)]||c[u]||k[u]||o;return a?n.createElement(m,r(r({ref:t},d),{},{components:a})):n.createElement(m,r({ref:t},d))}));function m(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=a.length,r=new Array(o);r[0]=u;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[c]="string"==typeof e?e:i,r[1]=s;for(var l=2;l<o;l++)r[l]=a[l];return n.createElement.apply(null,r)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},7622:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>r,default:()=>k,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var n=a(7462),i=(a(7294),a(3905));const o={},r="Using Redpanda to test 
FastKafka",s={unversionedId:"guides/Guide_31_Using_redpanda_to_test_fastkafka",id:"version-0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka",title:"Using Redpanda to test FastKafka",description:"What is FastKafka?",source:"@site/versioned_docs/version-0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_31_Using_redpanda_to_test_fastkafka",permalink:"/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},next:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow"}},p={},l=[{value:"What is FastKafka?",id:"what-is-fastkafka",level:2},{value:"What is Redpanda?",id:"what-is-redpanda",level:2},{value:"Example repo",id:"example-repo",level:2},{value:"The process",id:"the-process",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:2},{value:"2. Cloning and setting up the example repo",id:"2-cloning-and-setting-up-the-example-repo",level:2},{value:"Create a virtual environment",id:"create-a-virtual-environment",level:3},{value:"Install Python dependencies",id:"install-python-dependencies",level:3},{value:"3. Writing server code",id:"3-writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"4. Writing the test code",id:"4-writing-the-test-code",level:2},{value:"5. 
Running the tests",id:"5-running-the-tests",level:2},{value:"Recap",id:"recap",level:3}],d={toc:l},c="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-redpanda-to-test-fastkafka"},"Using Redpanda to test FastKafka"),(0,i.kt)("h2",{id:"what-is-fastkafka"},"What is FastKafka?"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,i.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,i.kt)("h2",{id:"what-is-redpanda"},"What is Redpanda?"),(0,i.kt)("p",null,"Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda."),(0,i.kt)("p",null,"From ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"redpanda.com"),":"),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"Redpanda is a Kafka\xae-compatible streaming data platform that is proven\nto be 10x faster and 6x lower in total costs. 
It is also JVM-free,\nZooKeeper\xae-free, Jepsen-tested and source available.")),(0,i.kt)("p",null,"Some of the advantages of Redpanda over Kafka are"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A single binary with built-in everything, no ZooKeeper\xae or JVM\nneeded."),(0,i.kt)("li",{parentName:"ol"},"Costs upto 6X less than Kafka."),(0,i.kt)("li",{parentName:"ol"},"Up to 10x lower average latencies and up to 6x faster Kafka\ntransactions without compromising correctness.")),(0,i.kt)("p",null,"To learn more about Redpanda, please visit their\n",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"website")," or checkout this ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark"},"blog\npost"),"\ncomparing Redpanda and Kafka\u2019s performance benchmarks."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A sample fastkafka-based library that uses Redpanda for testing, based\non this guide, can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"here"),"."),(0,i.kt)("h2",{id:"the-process"},"The process"),(0,i.kt)("p",null,"Here are the steps we\u2019ll be walking through to build our example:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Set up the prerequisites."),(0,i.kt)("li",{parentName:"ol"},"Clone the example repo."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write an application using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write a test case to test FastKafka with Redpanda."),(0,i.kt)("li",{parentName:"ol"},"Run the test case and produce/consume messages.")),(0,i.kt)("h2",{id:"1-prerequisites"},"1. Prerequisites"),(0,i.kt)("p",null,"Before starting, make sure you have the following prerequisites set up:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Python 3.x"),": A Python 3.x installation is required to run\nFastKafka. 
You can download the latest version of Python from the\n",(0,i.kt)("a",{parentName:"li",href:"https://www.python.org/downloads/"},"official website"),". You\u2019ll also\nneed to have pip installed and updated, which is Python\u2019s package\ninstaller."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Docker Desktop"),": Docker is used to run Redpanda, which is\nrequired for testing FastKafka. You can download and install Docker\nDesktop from the ",(0,i.kt)("a",{parentName:"li",href:"https://www.docker.com/products/docker-desktop/"},"official\nwebsite"),"."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Git"),": You\u2019ll need to have Git installed to clone the example\nrepo. You can download Git from the ",(0,i.kt)("a",{parentName:"li",href:"https://git-scm.com/downloads"},"official\nwebsite"),".")),(0,i.kt)("h2",{id:"2-cloning-and-setting-up-the-example-repo"},"2. Cloning and setting up the example repo"),(0,i.kt)("p",null,"To get started with the example code, clone the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"GitHub\nrepository")," by\nrunning the following command in your terminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n")),(0,i.kt)("p",null,"This will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files."),(0,i.kt)("h3",{id:"create-a-virtual-environment"},"Create a virtual environment"),(0,i.kt)("p",null,"Before writing any code, let\u2019s ",(0,i.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#module-venv"},"create a new virtual\nenvironment"),"\nfor our project."),(0,i.kt)("p",null,"A virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between 
different projects."),(0,i.kt)("p",null,"To create a new virtual environment, run the following commands in your\nterminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"python3 -m venv venv\n")),(0,i.kt)("p",null,"This will create a new directory called ",(0,i.kt)("inlineCode",{parentName:"p"},"venv")," in your project\ndirectory, which will contain the virtual environment."),(0,i.kt)("p",null,"To activate the virtual environment, run the following command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"source venv/bin/activate\n")),(0,i.kt)("p",null,"This will change your shell\u2019s prompt to indicate that you are now\nworking inside the virtual environment."),(0,i.kt)("p",null,"Finally, run the following command to upgrade ",(0,i.kt)("inlineCode",{parentName:"p"},"pip"),", the Python package\ninstaller:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install --upgrade pip\n")),(0,i.kt)("h3",{id:"install-python-dependencies"},"Install Python dependencies"),(0,i.kt)("p",null,"Next, let\u2019s install the required Python dependencies. In this guide,\nwe\u2019ll be using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nto write our application code and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest-asyncio")," to test\nit."),(0,i.kt)("p",null,"You can install the dependencies from the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file\nprovided in the cloned repository by running:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install -r requirements.txt\n")),(0,i.kt)("p",null,"This will install all the required packages and their dependencies."),(0,i.kt)("h2",{id:"3-writing-server-code"},"3. 
Writing server code"),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:"),(0,i.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,i.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model."),(0,i.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,i.kt)("h3",{id:"messages"},"Messages"),(0,i.kt)("p",null,"FastKafka uses ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,i.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,i.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat"},(0,i.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,i.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,i.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("h3",{id:"application"},"Application"),(0,i.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,i.kt)("p",null,"It starts by defining a dictionary called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,i.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,i.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker."),(0,i.kt)("p",null,"Next, an instance of the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum required arguments:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generating documentation")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,i.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,i.kt)("p",null,"FastKafka provides convenient function decorators ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,i.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,i.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,i.kt)("p",null,"This following example shows how to use the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,i.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,i.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h2",{id:"4-writing-the-test-code"},"4. Writing the test code"),(0,i.kt)("p",null,"The service can be tested using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstance which can be configured to start a ",(0,i.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/LocalRedpandaBroker/"},"Redpanda\nbroker")," for testing\npurposes. 
The ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file in the cloned repository contains the\nfollowing code for testing."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n # Start Tester app and create local Redpanda broker for testing\n async with Tester(kafka_app).using_local_redpanda(\n tag="v23.1.2", listener_port=9092\n ) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nmodule utilizes uses\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker/#fastkafka.testing.LocalRedpandaBroker"},(0,i.kt)("inlineCode",{parentName:"a"},"LocalRedpandaBroker")),"\nto start and stop a Redpanda broker for testing purposes using Docker"),(0,i.kt)("h2",{id:"5-running-the-tests"},"5. Running the tests"),(0,i.kt)("p",null,"We can run the tests which is in ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file by executing the\nfollowing command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pytest test.py\n")),(0,i.kt)("p",null,"This will start a Redpanda broker using Docker and executes tests. 
The\noutput of the command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item \n\ntest.py . [100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n")),(0,i.kt)("p",null,"Running the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable."),(0,i.kt)("h3",{id:"recap"},"Recap"),(0,i.kt)("p",null,"We have created an Iris classification model and encapulated it into our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napplication. 
The app will consume the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," from the\n",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,i.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,i.kt)("p",null,"To test the app we have:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Created the app")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Started our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nclass with ",(0,i.kt)("inlineCode",{parentName:"p"},"Redpanda")," broker which mirrors the developed app topics\nfor testing purposes")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Sent ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message to ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message"))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/81bf77fc.e86cf07e.js b/assets/js/81bf77fc.e86cf07e.js new file mode 100644 index 0000000..d8d9dd7 --- /dev/null +++ b/assets/js/81bf77fc.e86cf07e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1358],{3905:(e,t,n)=>{n.d(t,{Zo:()=>l,kt:()=>y});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function c(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){r(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function i(e,t){if(null==e)return{};var n,a,r=function(e,t){if(null==e)return{};var n,a,r={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var p=a.createContext({}),s=function(e){var t=a.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):c(c({},t),e)),n},l=function(e){var t=s(e.components);return a.createElement(p.Provider,{value:t},e.children)},d="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,l=i(e,["components","mdxType","originalType","parentName"]),d=s(n),u=r,y=d["".concat(p,".").concat(u)]||d[u]||f[u]||o;return n?a.createElement(y,c(c({ref:t},l),{},{components:n})):a.createElement(y,c({ref:t},l))}));function y(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,c=new Array(o);c[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[d]="string"==typeof e?e:r,c[1]=i;for(var s=2;s<o;s++)c[s]=n[s];return a.createElement.apply(null,c)}return a.createElement.apply(null,n)}u.displayName="MDXCreateElement"},8524:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var a=n(7462),r=(n(7294),n(3905));const o={},c=void 
0,i={unversionedId:"api/fastkafka/encoder/avsc_to_pydantic",id:"version-0.5.0/api/fastkafka/encoder/avsc_to_pydantic",title:"avsc_to_pydantic",description:"fastkafka.encoder.avsctopydantic {fastkafka.encoder.avsctopydantic}",source:"@site/versioned_docs/version-0.5.0/api/fastkafka/encoder/avsc_to_pydantic.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avsc_to_pydantic",permalink:"/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic",draft:!1,tags:[],version:"0.5.0",frontMatter:{}},p={},s=[{value:"<code>fastkafka.encoder.avsc_to_pydantic</code>",id:"fastkafka.encoder.avsc_to_pydantic",level:2},{value:"<code>avsc_to_pydantic</code>",id:"avsc_to_pydantic",level:3}],l={toc:s},d="wrapper";function f(e){let{components:t,...n}=e;return(0,r.kt)(d,(0,a.Z)({},l,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.encoder.avsc_to_pydantic"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avsc_to_pydantic")),(0,r.kt)("h3",{id:"avsc_to_pydantic"},(0,r.kt)("inlineCode",{parentName:"h3"},"avsc_to_pydantic")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass")),(0,r.kt)("p",null,"Generate pydantic model from given Avro Schema"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"schema"),": Avro schema in dictionary format")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Pydantic model class built from given avro schema")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/83ec613f.6af4e02b.js b/assets/js/83ec613f.6af4e02b.js new file mode 100644 index 0000000..1bf0834 --- /dev/null +++ b/assets/js/83ec613f.6af4e02b.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4098],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>d});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){l(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function o(e,t){if(null==e)return{};var a,n,l=function(e,t){if(null==e)return{};var a,n,l={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},k=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,k=o(e,["components","mdxType","originalType","parentName"]),f=p(a),m=l,d=f["".concat(i,".").concat(m)]||f[m]||u[m]||r;return a?n.createElement(d,s(s({ref:t},k),{},{components:a})):n.createElement(d,s({ref:t},k))}));function d(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,s=new Array(r);s[0]=m;var o={};for(var i in 
t)hasOwnProperty.call(t,i)&&(o[i]=t[i]);o.originalType=e,o[f]="string"==typeof e?e:l,s[1]=o;for(var p=2;p<r;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},1236:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const r={},s="fastkafka",o={unversionedId:"cli/fastkafka",id:"cli/fastkafka",title:"fastkafka",description:"Usage:",source:"@site/docs/cli/fastkafka.md",sourceDirName:"cli",slug:"/cli/fastkafka",permalink:"/docs/next/cli/fastkafka",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Tester",permalink:"/docs/next/api/fastkafka/testing/Tester"},next:{title:"run_fastkafka_server_process",permalink:"/docs/next/cli/run_fastkafka_server_process"}},i={},p=[{value:"<code>fastkafka docs</code>",id:"fastkafka-docs",level:2},{value:"<code>fastkafka docs generate</code>",id:"fastkafka-docs-generate",level:3},{value:"<code>fastkafka docs install_deps</code>",id:"fastkafka-docs-install_deps",level:3},{value:"<code>fastkafka docs serve</code>",id:"fastkafka-docs-serve",level:3},{value:"<code>fastkafka run</code>",id:"fastkafka-run",level:2},{value:"<code>fastkafka testing</code>",id:"fastkafka-testing",level:2},{value:"<code>fastkafka testing install_deps</code>",id:"fastkafka-testing-install_deps",level:3}],k={toc:p},f="wrapper";function u(e){let{components:t,...a}=e;return(0,l.kt)(f,(0,n.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h1",{id:"fastkafka"},(0,l.kt)("inlineCode",{parentName:"h1"},"fastkafka")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka [OPTIONS] COMMAND 
[ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"docs"),": Commands for managing FastKafka app..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"run"),": Runs Fast Kafka API application"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"testing"),": Commands for managing FastKafka testing")),(0,l.kt)("h2",{id:"fastkafka-docs"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka docs")),(0,l.kt)("p",null,"Commands for managing FastKafka app documentation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"generate"),": Generates documentation for a FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for 
FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"serve"),": Generates and serves documentation for a...")),(0,l.kt)("h3",{id:"fastkafka-docs-generate"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs generate")),(0,l.kt)("p",null,"Generates documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs generate [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created; default is current directory"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka documentation generation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and 
exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-serve"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs serve")),(0,l.kt)("p",null,"Generates and serves documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs serve [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created; default is current directory"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--bind TEXT"),": Some info ","[default: 127.0.0.1]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--port INTEGER"),": Some info ","[default: 8000]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-run"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka run")),(0,l.kt)("p",null,"Runs Fast Kafka API application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka run [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', 
where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--num-workers INTEGER"),": Number of FastKafka instances to run, defaults to number of CPU cores. ","[default: 2]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class. ","[default: localhost]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-testing"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka testing")),(0,l.kt)("p",null,"Commands for managing FastKafka testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka app...")),(0,l.kt)("h3",{id:"fastkafka-testing-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka testing install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka app testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ 
fastkafka testing install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/847c12c2.0dcae720.js b/assets/js/847c12c2.0dcae720.js new file mode 100644 index 0000000..e4821af --- /dev/null +++ b/assets/js/847c12c2.0dcae720.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1790],{3905:(e,n,a)=>{a.d(n,{Zo:()=>l,kt:()=>f});var t=a(7294);function s(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function i(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function o(e){for(var n=1;n<arguments.length;n++){var a=null!=arguments[n]?arguments[n]:{};n%2?i(Object(a),!0).forEach((function(n){s(e,n,a[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(a,n))}))}return e}function r(e,n){if(null==e)return{};var a,t,s=function(e,n){if(null==e)return{};var a,t,s={},i=Object.keys(e);for(t=0;t<i.length;t++)a=i[t],n.indexOf(a)>=0||(s[a]=e[a]);return s}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t<i.length;t++)a=i[t],n.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(s[a]=e[a])}return s}var d=t.createContext({}),c=function(e){var n=t.useContext(d),a=n;return e&&(a="function"==typeof e?e(n):o(o({},n),e)),a},l=function(e){var n=c(e.components);return t.createElement(d.Provider,{value:n},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return 
t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,s=e.mdxType,i=e.originalType,d=e.parentName,l=r(e,["components","mdxType","originalType","parentName"]),p=c(a),u=s,f=p["".concat(d,".").concat(u)]||p[u]||m[u]||i;return a?t.createElement(f,o(o({ref:n},l),{},{components:a})):t.createElement(f,o({ref:n},l))}));function f(e,n){var a=arguments,s=n&&n.mdxType;if("string"==typeof e||s){var i=a.length,o=new Array(i);o[0]=u;var r={};for(var d in n)hasOwnProperty.call(n,d)&&(r[d]=n[d]);r.originalType=e,r[p]="string"==typeof e?e:s,o[1]=r;for(var c=2;c<i;c++)o[c]=a[c];return t.createElement.apply(null,o)}return t.createElement.apply(null,a)}u.displayName="MDXCreateElement"},1155:(e,n,a)=>{a.r(n),a.d(n,{assets:()=>d,contentTitle:()=>o,default:()=>m,frontMatter:()=>i,metadata:()=>r,toc:()=>c});var t=a(7462),s=(a(7294),a(3905));const i={},o="Encoding and Decoding Kafka Messages with FastKafka",r={unversionedId:"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",id:"version-0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",title:"Encoding and Decoding Kafka Messages with FastKafka",description:"Prerequisites",source:"@site/versioned_docs/version-0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",permalink:"/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Lifespan Events",permalink:"/docs/0.5.0/guides/Guide_05_Lifespan_Handler"},next:{title:"Using Redpanda to test FastKafka",permalink:"/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka"}},d={},c=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Ways to Encode and Decode Messages with FastKafka",id:"ways-to-encode-and-decode-messages-with-fastkafka",level:2},{value:"1. 
Json encoder and decoder",id:"1-json-encoder-and-decoder",level:2},{value:"2. Avro encoder and decoder",id:"2-avro-encoder-and-decoder",level:2},{value:"What is Avro?",id:"what-is-avro",level:3},{value:"Installing FastKafka with Avro dependencies",id:"installing-fastkafka-with-avro-dependencies",level:3},{value:"Defining Avro Schema Using Pydantic Models",id:"defining-avro-schema-using-pydantic-models",level:3},{value:"Reusing existing avro schema",id:"reusing-existing-avro-schema",level:3},{value:"Building pydantic models from avro schema dictionary",id:"building-pydantic-models-from-avro-schema-dictionary",level:4},{value:"Building pydantic models from <code>.avsc</code> file",id:"building-pydantic-models-from-avsc-file",level:4},{value:"Consume/Produce avro messages with FastKafka",id:"consumeproduce-avro-messages-with-fastkafka",level:3},{value:"Assembling it all together",id:"assembling-it-all-together",level:3},{value:"3. Custom encoder and decoder",id:"3-custom-encoder-and-decoder",level:2},{value:"Writing a custom encoder and decoder",id:"writing-a-custom-encoder-and-decoder",level:3},{value:"Assembling it all together",id:"assembling-it-all-together-1",level:3}],l={toc:c},p="wrapper";function m(e){let{components:n,...a}=e;return(0,s.kt)(p,(0,t.Z)({},l,a,{components:n,mdxType:"MDXLayout"}),(0,s.kt)("h1",{id:"encoding-and-decoding-kafka-messages-with-fastkafka"},"Encoding and Decoding Kafka Messages with FastKafka"),(0,s.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,s.kt)("ol",null,(0,s.kt)("li",{parentName:"ol"},"A basic knowledge of\n",(0,s.kt)("a",{parentName:"li",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nis needed to proceed with this guide. 
If you are not familiar with\n",(0,s.kt)("a",{parentName:"li",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),",\nplease go through the ",(0,s.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("a",{parentName:"li",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith its dependencies installed is needed. Please install\n",(0,s.kt)("a",{parentName:"li",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nusing the command - ",(0,s.kt)("inlineCode",{parentName:"li"},"pip install fastkafka"))),(0,s.kt)("h2",{id:"ways-to-encode-and-decode-messages-with-fastkafka"},"Ways to Encode and Decode Messages with FastKafka"),(0,s.kt)("p",null,"In python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. 
imilarly, while consuming messages, we consume them as bytes and\nthen convert them to strings."),(0,s.kt)("p",null,"In FastKafka, we specify message schema using Pydantic models as\nmentioned in ",(0,s.kt)("a",{parentName:"p",href:"/docs#messages"},"tutorial"),":"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,s.kt)("p",null,"Then, we send and receive messages as instances of Pydantic models which\nwe defined. So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics."),(0,s.kt)("p",null,"The ",(0,s.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@produces")," methods of FastKafka accept a parameter\ncalled ",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:"),(0,s.kt)("ol",null,(0,s.kt)("li",{parentName:"ol"},"json - This is the default encoder/decoder option in FastKafka.\nWhile producing, this option converts our instance of Pydantic model\nmessages to a JSON string and then converts it to bytes before\nsending it to the topic. 
While consuming, it converts bytes to a\nJSON string and then constructs an instance of Pydantic model from\nthe JSON string."),(0,s.kt)("li",{parentName:"ol"},"avro - This option uses Avro encoding/decoding to convert instances\nof Pydantic model messages to bytes while producing, and while\nconsuming, it constructs an instance of Pydantic model from bytes."),(0,s.kt)("li",{parentName:"ol"},"custom encoder/decoder - If you are not happy with the json or avro\nencoder/decoder options, you can write your own encoder/decoder\nfunctions and use them to encode/decode Pydantic messages.")),(0,s.kt)("h2",{id:"1-json-encoder-and-decoder"},"1. Json encoder and decoder"),(0,s.kt)("p",null,"The default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string."),(0,s.kt)("p",null,"We can use the application from ",(0,s.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let\u2019s modify it to explicitly accept the \u2018json\u2019 encoder/decoder\nparameter:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="json")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, 
msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="json")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,s.kt)("p",null,"In the above code, the ",(0,s.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),' decorator sets up a\nconsumer for the \u201cinput_data" topic, using the \u2018json\u2019 decoder to convert\nthe message payload to an instance of ',(0,s.kt)("inlineCode",{parentName:"p"},"IrisInputData"),". The\n",(0,s.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' decorator sets up a producer for the \u201cpredictions"\ntopic, using the \u2018json\u2019 encoder to convert the instance of\n',(0,s.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," to message payload."),(0,s.kt)("h2",{id:"2-avro-encoder-and-decoder"},"2. Avro encoder and decoder"),(0,s.kt)("h3",{id:"what-is-avro"},"What is Avro?"),(0,s.kt)("p",null,"Avro is a row-oriented remote procedure call and data serialization\nframework developed within Apache\u2019s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n",(0,s.kt)("a",{parentName:"p",href:"https://avro.apache.org/docs/"},"docs"),"."),(0,s.kt)("h3",{id:"installing-fastkafka-with-avro-dependencies"},"Installing FastKafka with Avro dependencies"),(0,s.kt)("p",null,(0,s.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith dependencies for Apache Avro installed is needed to use avro\nencoder/decoder. 
Please install\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith Avro support using the command - ",(0,s.kt)("inlineCode",{parentName:"p"},"pip install fastkafka[avro]")),(0,s.kt)("h3",{id:"defining-avro-schema-using-pydantic-models"},"Defining Avro Schema Using Pydantic Models"),(0,s.kt)("p",null,"By default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models."),(0,s.kt)("p",null,"So, similar to the ",(0,s.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),", the message schema will\nremain as it is."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,s.kt)("p",null,"No need to change anything to support avro. 
You can use existing\nPydantic models as is."),(0,s.kt)("h3",{id:"reusing-existing-avro-schema"},"Reusing existing avro schema"),(0,s.kt)("p",null,"If you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined."),(0,s.kt)("h4",{id:"building-pydantic-models-from-avro-schema-dictionary"},"Building pydantic models from avro schema dictionary"),(0,s.kt)("p",null,"Let\u2019s modify the above example and let\u2019s assume we have schemas already\nfor ",(0,s.kt)("inlineCode",{parentName:"p"},"IrisInputData")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," which will look like below:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'iris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n')),(0,s.kt)("p",null,"We can easily construct pydantic models from avro schema using\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/#fastkafka.encoder.avsc_to_pydantic"},(0,s.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction which is included as part of\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nitself."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka._components.encoder.avro import 
avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n")),(0,s.kt)("p",null,"The above code will convert avro schema to pydantic models and will\nprint pydantic models\u2019 fields. The output of the above is:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-txt"},"{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n")),(0,s.kt)("p",null,"This is exactly same as manually defining the pydantic models ourselves.\nYou don\u2019t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/#fastkafka.encoder.avsc_to_pydantic"},(0,s.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction as demonstrated above."),(0,s.kt)("h4",{id:"building-pydantic-models-from-avsc-file"},"Building pydantic models from ",(0,s.kt)("inlineCode",{parentName:"h4"},".avsc")," file"),(0,s.kt)("p",null,"Not all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary ",(0,s.kt)("inlineCode",{parentName:"p"},".avsc")," files in\nfilesystem. 
Let\u2019s see how to convert those ",(0,s.kt)("inlineCode",{parentName:"p"},".avsc")," files to pydantic\nmodels."),(0,s.kt)("p",null,"Let\u2019s assume our avro files are stored in files called\n",(0,s.kt)("inlineCode",{parentName:"p"},"iris_input_data_schema.avsc")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"iris_prediction_schema.avsc"),". In that\ncase, following code converts the schema to pydantic models:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'import json\nfrom fastkafka._components.encoder.avro import avsc_to_pydantic\n\n\nwith open("iris_input_data_schema.avsc", "rb") as f:\n iris_input_data_schema = json.load(f)\n \nwith open("iris_prediction_schema.avsc", "rb") as f:\n iris_prediction_schema = json.load(f)\n \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.__fields__)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.__fields__)\n')),(0,s.kt)("h3",{id:"consumeproduce-avro-messages-with-fastkafka"},"Consume/Produce avro messages with FastKafka"),(0,s.kt)("p",null,(0,s.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nprovides ",(0,s.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@produces")," methods to consume/produces\nmessages to/from a ",(0,s.kt)("inlineCode",{parentName:"p"},"Kafka")," topic. 
This is explained in\n",(0,s.kt)("a",{parentName:"p",href:"/docs#function-decorators"},"tutorial"),"."),(0,s.kt)("p",null,"The ",(0,s.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@produces")," methods accepts a parameter called\n",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode avro messages."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", encoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", decoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,s.kt)("p",null,"In the above example, in ",(0,s.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@produces")," methods, we\nexplicitly instruct FastKafka to ",(0,s.kt)("inlineCode",{parentName:"p"},"decode")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"encode")," messages using\nthe ",(0,s.kt)("inlineCode",{parentName:"p"},"avro")," ",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," instead of the default ",(0,s.kt)("inlineCode",{parentName:"p"},"json"),"\n",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,s.kt)("inlineCode",{parentName:"p"},"encoder"),"."),(0,s.kt)("h3",{id:"assembling-it-all-together"},"Assembling it all together"),(0,s.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,s.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use 
",(0,s.kt)("inlineCode",{parentName:"p"},"avro")," to ",(0,s.kt)("inlineCode",{parentName:"p"},"decode")," and\n",(0,s.kt)("inlineCode",{parentName:"p"},"encode")," messages:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\niris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka._components.encoder.avro import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = 
FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,s.kt)("p",null,"The above code is a sample implementation of using FastKafka to consume\nand produce Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/#fastkafka.encoder.avsc_to_pydantic"},(0,s.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction from the FastKafka library to convert the Avro schema into\nPydantic models, which will be used to decode and encode Avro messages."),(0,s.kt)("p",null,"The\n",(0,s.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,s.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is then instantiated with the broker details, and two functions\ndecorated with ",(0,s.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' are\ndefined to consume messages from the \u201cinput_data" topic and produce\nmessages to the \u201cpredictions" topic, respectively. 
The functions uses\nthe decoder=\u201cavro" and encoder=\u201cavro" parameters to decode and encode\nthe Avro messages.'),(0,s.kt)("p",null,"In summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline."),(0,s.kt)("h2",{id:"3-custom-encoder-and-decoder"},"3. Custom encoder and decoder"),(0,s.kt)("p",null,"If you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages."),(0,s.kt)("h3",{id:"writing-a-custom-encoder-and-decoder"},"Writing a custom encoder and decoder"),(0,s.kt)("p",null,"In this section, let\u2019s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n",(0,s.kt)("a",{parentName:"p",href:"https://en.wikipedia.org/wiki/ROT13"},"ROT13")," cipher."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},"import codecs\nimport json\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, 'rot13')\n raw_bytes = obfuscated.encode(\"utf-8\")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n obfuscated = raw_msg.decode(\"utf-8\")\n msg_str = codecs.decode(obfuscated, 'rot13')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n")),(0,s.kt)("p",null,"The above code defines two custom functions for encoding and decoding\nmessages in a Kafka application using the FastKafka library."),(0,s.kt)("p",null,"The encoding function, ",(0,s.kt)("inlineCode",{parentName:"p"},"custom_encoder()"),", takes a message ",(0,s.kt)("inlineCode",{parentName:"p"},"msg")," which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe ",(0,s.kt)("inlineCode",{parentName:"p"},"json()")," method, obfuscates the resulting string using the ROT13\nalgorithm from the ",(0,s.kt)("inlineCode",{parentName:"p"},"codecs")," 
module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding."),(0,s.kt)("p",null,"The decoding function, ",(0,s.kt)("inlineCode",{parentName:"p"},"custom_decoder()"),", takes a raw message ",(0,s.kt)("inlineCode",{parentName:"p"},"raw_msg"),"\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the ",(0,s.kt)("inlineCode",{parentName:"p"},"json.loads()")," method and returns a\nnew instance of the specified ",(0,s.kt)("inlineCode",{parentName:"p"},"cls")," class initialized with the decoded\ndictionary."),(0,s.kt)("p",null,"These functions can be used with FastKafka\u2019s ",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"decoder"),"\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics."),(0,s.kt)("p",null,"Let\u2019s test the above code"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},"i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n")),(0,s.kt)("p",null,"This will result in following output"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-txt"},'b\'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}\'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n')),(0,s.kt)("h3",{id:"assembling-it-all-together-1"},"Assembling it all together"),(0,s.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,s.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use our custom decoder and\nencoder 
functions:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\n\nimport codecs\nimport json\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, \'rot13\')\n raw_bytes = obfuscated.encode("utf-8")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:\n obfuscated = raw_msg.decode("utf-8")\n msg_str = codecs.decode(obfuscated, \'rot13\')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n 
},\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,s.kt)("p",null,"This code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system."),(0,s.kt)("p",null,"The custom ",(0,s.kt)("inlineCode",{parentName:"p"},"encoder")," function takes a message represented as a\n",(0,s.kt)("inlineCode",{parentName:"p"},"BaseModel")," and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned."),(0,s.kt)("p",null,"The custom ",(0,s.kt)("inlineCode",{parentName:"p"},"decoder")," function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the ",(0,s.kt)("inlineCode",{parentName:"p"},"json"),"\nmodule. 
This dictionary is then converted to a ",(0,s.kt)("inlineCode",{parentName:"p"},"BaseModel")," instance\nusing the cls parameter."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/87b29f85.960f3d83.js b/assets/js/87b29f85.960f3d83.js new file mode 100644 index 0000000..b70d716 --- /dev/null +++ b/assets/js/87b29f85.960f3d83.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8110],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?o(Object(r),!0).forEach((function(t){a(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):o(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function i(e,t){if(null==e)return{};var r,n,a=function(e,t){if(null==e)return{};var r,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},c=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var 
r=e.components,a=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),f=p(r),k=a,m=f["".concat(l,".").concat(k)]||f[k]||u[k]||o;return r?n.createElement(m,s(s({ref:t},c),{},{components:r})):n.createElement(m,s({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[f]="string"==typeof e?e:a,s[1]=i;for(var p=2;p<o;p++)s[p]=r[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,r)}k.displayName="MDXCreateElement"},9253:(e,t,r)=>{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=r(7462),a=(r(7294),r(3905));const o={},s="run_fastkafka_server_process",i={unversionedId:"cli/run_fastkafka_server_process",id:"version-0.8.0/cli/run_fastkafka_server_process",title:"run_fastkafka_server_process",description:"Usage:",source:"@site/versioned_docs/version-0.8.0/cli/run_fastkafka_server_process.md",sourceDirName:"cli",slug:"/cli/run_fastkafka_server_process",permalink:"/docs/cli/run_fastkafka_server_process",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"fastkafka",permalink:"/docs/cli/fastkafka"},next:{title:"LICENSE",permalink:"/docs/LICENSE"}},l={},p=[],c={toc:p},f="wrapper";function u(e){let{components:t,...r}=e;return(0,a.kt)(f,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"run_fastkafka_server_process"},(0,a.kt)("inlineCode",{parentName:"h1"},"run_fastkafka_server_process")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Usage"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-console"},"$ run_fastkafka_server_process [OPTIONS] 
APP\n")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"APP"),": Input in the form of 'path:app', where ",(0,a.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,a.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,a.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Options"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. ","[required]"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/87f59f37.6b9e11c6.js b/assets/js/87f59f37.6b9e11c6.js new file mode 100644 index 0000000..3cc7cef --- /dev/null +++ b/assets/js/87f59f37.6b9e11c6.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[733],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>u});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=s(a),m=i,u=d["".concat(p,".").concat(m)]||d[m]||k[m]||r;return a?n.createElement(u,o(o({ref:t},c),{},{components:a})):n.createElement(u,o({ref:t},c))}));function u(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=a.length,o=new Array(r);o[0]=m;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[d]="string"==typeof e?e:i,o[1]=l;for(var s=2;s<r;s++)o[s]=a[s];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},3419:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>s});var n=a(7462),i=(a(7294),a(3905));const r={},o="Deploying FastKafka using 
Docker",l={unversionedId:"guides/Guide_30_Using_docker_to_deploy_fastkafka",id:"guides/Guide_30_Using_docker_to_deploy_fastkafka",title:"Deploying FastKafka using Docker",description:"Building a Docker Image",source:"@site/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_30_Using_docker_to_deploy_fastkafka",permalink:"/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/next/guides/Guide_04_Github_Actions_Workflow"},next:{title:"Using FastAPI to Run FastKafka Application",permalink:"/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application"}},p={},s=[{value:"Building a Docker Image",id:"building-a-docker-image",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Creating requirements.txt file",id:"creating-requirementstxt-file",level:3},{value:"Creating Dockerfile",id:"creating-dockerfile",level:3},{value:"Build the Docker Image",id:"build-the-docker-image",level:3},{value:"Start the Docker Container",id:"start-the-docker-container",level:3},{value:"Additional Security",id:"additional-security",level:2},{value:"Example repo",id:"example-repo",level:2}],c={toc:s},d="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"deploying-fastkafka-using-docker"},"Deploying FastKafka using Docker"),(0,i.kt)("h2",{id:"building-a-docker-image"},"Building a Docker Image"),(0,i.kt)("p",null,"To build a Docker image for a FastKafka project, we need the following\nitems:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library that is built using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"A file in which the requirements are specified. 
This could be a\nrequirements.txt file, a setup.py file, or even a wheel file."),(0,i.kt)("li",{parentName:"ol"},"A Dockerfile to build an image that will include the two files\nmentioned above.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka 
broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"creating-requirementstxt-file"},"Creating requirements.txt file"),(0,i.kt)("p",null,"The above code only requires FastKafka. So, we will add only that to the\n",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file, but you can add additional requirements to it\nas well."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"fastkafka>=0.3.0\n")),(0,i.kt)("p",null,"Here we are using ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," to store the project\u2019s\ndependencies. However, other methods like ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv"),", and\n",(0,i.kt)("inlineCode",{parentName:"p"},"wheel")," files can also be used. ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py")," is commonly used for\npackaging and distributing Python modules, while ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv")," is a tool used\nfor managing virtual environments and package dependencies. 
",(0,i.kt)("inlineCode",{parentName:"p"},"wheel"),"\nfiles are built distributions of Python packages that can be installed\nwith pip."),(0,i.kt)("h3",{id:"creating-dockerfile"},"Creating Dockerfile"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-dockerfile"},'# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]\n')),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Start from the official Python base image.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the current working directory to ",(0,i.kt)("inlineCode",{parentName:"p"},"/project"),"."),(0,i.kt)("p",{parentName:"li"},"This is where we\u2019ll put the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file and the\n",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Copy the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file inside\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install the package dependencies in the requirements file."),(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"--no-cache-dir")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to not save the downloaded\npackages locally, as that is only if ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," was going to be run again\nto install the same packages, but that\u2019s not the case when working\nwith containers."),(0,i.kt)("p",{parentName:"li"},"The 
",(0,i.kt)("inlineCode",{parentName:"p"},"--upgrade")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to upgrade the packages if they\nare already installed.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the ",(0,i.kt)("strong",{parentName:"p"},"command")," to run the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"p"},"CMD")," takes a list of strings, each of these strings is what you\nwould type in the command line separated by spaces."),(0,i.kt)("p",{parentName:"li"},"This command will be run from the ",(0,i.kt)("strong",{parentName:"p"},"current working directory"),", the\nsame ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory you set above with ",(0,i.kt)("inlineCode",{parentName:"p"},"WORKDIR /project"),"."),(0,i.kt)("p",{parentName:"li"},"We supply additional parameters ",(0,i.kt)("inlineCode",{parentName:"p"},"--num-workers")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"--kafka-broker"),"\nfor the run command. 
Finally, we specify the location of our\nFastKafka application as a command argument."),(0,i.kt)("p",{parentName:"li"},"To learn more about ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command please check the ",(0,i.kt)("a",{parentName:"p",href:"../../cli/fastkafka/#fastkafka-run"},"CLI\ndocs"),"."))),(0,i.kt)("h3",{id:"build-the-docker-image"},"Build the Docker Image"),(0,i.kt)("p",null,"Now that all the files are in place, let\u2019s build the container image."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Go to the project directory (where your ",(0,i.kt)("inlineCode",{parentName:"p"},"Dockerfile")," is, containing\nyour ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file).")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to build the image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker build -t fastkafka_project_image .\n")),(0,i.kt)("p",{parentName:"li"},"This command will create a docker image with the name\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and the ",(0,i.kt)("inlineCode",{parentName:"p"},"latest")," tag."))),(0,i.kt)("p",null,"That\u2019s it! You have now built a docker image for your FastKafka project."),(0,i.kt)("h3",{id:"start-the-docker-container"},"Start the Docker Container"),(0,i.kt)("p",null,"Run a container based on the built image:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker run -d --name fastkafka_project_container fastkafka_project_image\n")),(0,i.kt)("h2",{id:"additional-security"},"Additional Security"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Trivy")," is an open-source tool that scans Docker images for\nvulnerabilities. It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. 
Here\u2019s\nhow you can use ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image"),":"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," on your local machine by following the instructions\nprovided in the ",(0,i.kt)("a",{parentName:"p",href:"https://aquasecurity.github.io/trivy/latest/getting-started/installation/"},"official ",(0,i.kt)("inlineCode",{parentName:"a"},"trivy"),"\ndocumentation"),".")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to scan your fastkafka_project_image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"trivy image fastkafka_project_image\n")),(0,i.kt)("p",{parentName:"li"},"This command will scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," for any\nvulnerabilities and provide you with a report of its findings.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Fix any vulnerabilities identified by ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy"),". 
You can do this by\nupdating the vulnerable package to a more secure version or by using\na different package altogether.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Rebuild your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and repeat steps 2 and 3\nuntil ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," reports no vulnerabilities."))),(0,i.kt)("p",null,"By using ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nbased library which uses above mentioned Dockerfile to build a docker\nimage can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/"},"here")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8804eadc.73cda2d1.js b/assets/js/8804eadc.73cda2d1.js new file mode 100644 index 0000000..262d8dc --- /dev/null +++ b/assets/js/8804eadc.73cda2d1.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9498],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>k});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function r(e){for(var a=1;a<arguments.length;a++){var 
t=null!=arguments[a]?arguments[a]:{};a%2?s(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function l(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},s=Object.keys(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=n.createContext({}),p=function(e){var a=n.useContext(i),t=a;return e&&(t="function"==typeof e?e(a):r(r({},a),e)),t},c=function(e){var a=p(e.components);return n.createElement(i.Provider,{value:a},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},d=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,s=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=p(t),d=o,k=u["".concat(i,".").concat(d)]||u[d]||m[d]||s;return t?n.createElement(k,r(r({ref:a},c),{},{components:t})):n.createElement(k,r({ref:a},c))}));function k(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=d;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[u]="string"==typeof e?e:o,r[1]=l;for(var p=2;p<s;p++)r[p]=t[p];return n.createElement.apply(null,r)}return n.createElement.apply(null,t)}d.displayName="MDXCreateElement"},5131:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>r,default:()=>m,frontMatter:()=>s,metadata:()=>l,toc:()=>p});var n=t(7462),o=(t(7294),t(3905));const s={},r="@consumes basics",l={unversionedId:"guides/Guide_11_Consumes_Basics",id:"version-0.7.1/guides/Guide_11_Consumes_Basics",title:"@consumes basics",description:"You 
can use @consumes decorator to consume messages from Kafka topics.",source:"@site/versioned_docs/version-0.7.1/guides/Guide_11_Consumes_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_11_Consumes_Basics",permalink:"/docs/0.7.1/guides/Guide_11_Consumes_Basics",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/0.7.1/"},next:{title:"Batch consuming",permalink:"/docs/0.7.1/guides/Guide_12_Batch_Consuming"}},i={},p=[{value:"Import <code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a consumer function and decorate it with <code>@consumes</code>",id:"create-a-consumer-function-and-decorate-it-with-consumes",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Send the message to kafka topic",id:"send-the-message-to-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2},{value:"Message metadata",id:"message-metadata",level:2},{value:"Create a consumer function with metadata",id:"create-a-consumer-function-with-metadata",level:3},{value:"Dealing with high latency consuming functions",id:"dealing-with-high-latency-consuming-functions",level:2}],c={toc:p},u="wrapper";function m(e){let{components:a,...t}=e;return(0,o.kt)(u,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"consumes-basics"},"@consumes basics"),(0,o.kt)("p",null,"You can use ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator to consume messages from Kafka topics."),(0,o.kt)("p",null,"In this guide we will create a simple FastKafka app that will consume\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages from hello_world topic."),(0,o.kt)("h2",{id:"import-fastkafka"},"Import 
",(0,o.kt)("a",{parentName:"h2",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,o.kt)("p",null,"To use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator, first we need to import the base\nFastKafka app to create our application."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,o.kt)("p",null,"In this demo we will log the messages to the output so that we can\ninspect and verify that our app is consuming properly. For that we need\nto import the logger."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n")),(0,o.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,o.kt)("p",null,"Next, you need to define the structure of the messages you want to\nconsume from the topic using ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". 
For\nthe guide we\u2019ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded."),(0,o.kt)("p",null,"Let\u2019s import ",(0,o.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,o.kt)("inlineCode",{parentName:"p"},"msg")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,o.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,o.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,o.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap server"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,o.kt)("h2",{id:"create-a-consumer-function-and-decorate-it-with-consumes"},"Create a consumer function and decorate it with ",(0,o.kt)("inlineCode",{parentName:"h2"},"@consumes")),(0,o.kt)("p",null,"Let\u2019s create a consumer function that will consume ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nfrom ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic and log 
them."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,o.kt)("p",null,"The function decorated with the ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will be called\nwhen a message is produced to Kafka."),(0,o.kt)("p",null,"The message will then be injected into the typed ",(0,o.kt)("em",{parentName:"p"},"msg")," argument of the\nfunction and its type will be used to parse the message."),(0,o.kt)("p",null,"In this example case, when the message is sent into a ",(0,o.kt)("em",{parentName:"p"},"hello_world"),"\ntopic, it will be parsed into a HelloWorld class and ",(0,o.kt)("inlineCode",{parentName:"p"},"on_hello_world"),"\nfunction will be called with the parsed class as ",(0,o.kt)("em",{parentName:"p"},"msg")," argument value."),(0,o.kt)("h2",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"Your app code should look like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,o.kt)("h2",{id:"run-the-app"},"Run the app"),(0,o.kt)("p",null,"Now we can run the app. 
Copy the code above in consumer_example.py and\nrun it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,o.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[513863]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[513863]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[513863]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[513863]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 513863...\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 513863 terminated.\n")),(0,o.kt)("h2",{id:"send-the-message-to-kafka-topic"},"Send the message to kafka topic"),(0,o.kt)("p",null,"Lets send a ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'echo {\\"msg\\": \\"Hello world\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'print(consumer_task.value[1].decode("UTF-8"))\n')),(0,o.kt)("p",null,"You should see the \u201cGot msg: msg='Hello world'\" being logged by your\nconsumer."),(0,o.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,o.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are\nreceiving the message from, this is because the ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default \u201con","_",'" prefix from it\nand use the rest as the topic name. 
In this example case, the topic is\n',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("p",null,"You can choose your custom prefix by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nconsumes decorator, like this:"),(0,o.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in consumes decorator, like this:"),(0,o.kt)("h2",{id:"message-data"},"Message data"),(0,o.kt)("p",null,"The message received from kafka is translated from binary JSON\nrepresentation int the class defined by typing of ",(0,o.kt)("em",{parentName:"p"},"msg")," parameter in the\nfunction decorated by the ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator."),(0,o.kt)("p",null,"In this example case, the message will be parsed into a ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld"),"\nclass."),(0,o.kt)("h2",{id:"message-metadata"},"Message metadata"),(0,o.kt)("p",null,"If you need any of Kafka message metadata such as timestamp, partition\nor headers you can access the metadata by adding a EventMetadata typed\nargument to your consumes function and the metadata from the incoming\nmessage will be automatically injected when calling the consumes\nfunction."),(0,o.kt)("p",null,"Let\u2019s demonstrate that."),(0,o.kt)("h3",{id:"create-a-consumer-function-with-metadata"},"Create a consumer function with metadata"),(0,o.kt)("p",null,"The only difference from the original basic consume function is that we\nare now passing the ",(0,o.kt)("inlineCode",{parentName:"p"},"meta: EventMetadata")," argument to the function. The\n",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will register that and, when a message is\nconsumed, it will also pass the metadata to your function. Now you can\nuse the metadata in your consume function. 
Lets log it to see what it\ncontains."),(0,o.kt)("p",null,"First, we need to import the EventMetadata"),(0,o.kt)("p",null,"Now we can add the ",(0,o.kt)("inlineCode",{parentName:"p"},"meta")," argument to our consuming function."),(0,o.kt)("p",null,"Your final app should look like this:"),(0,o.kt)("p",null,"Now lets run the app and send a message to the broker to see the logged\nmessage metadata."),(0,o.kt)("p",null,"You should see a similar log as the one below and the metadata being\nlogged in your app."),(0,o.kt)("p",null,"As you can see in the log, from the metadata you now have the\ninformation about the partition, offset, timestamp, key and headers.\n\ud83c\udf89"),(0,o.kt)("h2",{id:"dealing-with-high-latency-consuming-functions"},"Dealing with high latency consuming functions"),(0,o.kt)("p",null,"If your functions have high latency due to, for example, lengthy\ndatabase calls you will notice a big decrease in performance. This is\ndue to the issue of how the consumes decorator executes your consume\nfunctions when consumeing events. By default, the consume function will\nrun the consuming funtions for one topic sequentially, this is the most\nstraightforward approach and results with the least amount of overhead."),(0,o.kt)("p",null,"But, to handle those high latency tasks and run them in parallel,\nFastKafka has a\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor/#fastkafka.executors.DynamicTaskExecutor"},(0,o.kt)("inlineCode",{parentName:"a"},"DynamicTaskExecutor")),"\nprepared for your consumers. 
This executor comes with additional\noverhead, so use it only when you need to handle high latency functions."),(0,o.kt)("p",null,"Lets demonstrate how to use it."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'decorate_consumes_executor = """@app.consumes(executor="DynamicTaskExecutor")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n"""\nmd(f"```python\\n{decorate_consumes}\\n```")\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,o.kt)("p",null,"Lets send a ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,o.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'echo {\\"msg\\": \\"Hello world\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,o.kt)("p",null,"You should see the \u201cGot msg: msg='Hello world'\" being logged by your\nconsumer."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8894.674c4c01.js b/assets/js/8894.674c4c01.js new file mode 100644 index 0000000..15e5e1b --- /dev/null +++ b/assets/js/8894.674c4c01.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8894],{8894:(a,k,s)=>{s.r(k)}}]); \ No newline at end of file diff --git a/assets/js/898ba646.9c58f3c1.js b/assets/js/898ba646.9c58f3c1.js new file mode 100644 index 0000000..6930d84 --- /dev/null +++ b/assets/js/898ba646.9c58f3c1.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8279],{3905:(e,t,a)=>{a.d(t,{Zo:()=>p,kt:()=>m});var r=a(7294);function n(e,t,a){return t in 
e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,r)}return a}function l(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){n(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function c(e,t){if(null==e)return{};var a,r,n=function(e,t){if(null==e)return{};var a,r,n={},o=Object.keys(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||(n[a]=e[a]);return n}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(n[a]=e[a])}return n}var i=r.createContext({}),d=function(e){var t=r.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},p=function(e){var t=d(e.components);return r.createElement(i.Provider,{value:t},e.children)},s="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var a=e.components,n=e.mdxType,o=e.originalType,i=e.parentName,p=c(e,["components","mdxType","originalType","parentName"]),s=d(a),k=n,m=s["".concat(i,".").concat(k)]||s[k]||u[k]||o;return a?r.createElement(m,l(l({ref:t},p),{},{components:a})):r.createElement(m,l({ref:t},p))}));function m(e,t){var a=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var o=a.length,l=new Array(o);l[0]=k;var c={};for(var i in t)hasOwnProperty.call(t,i)&&(c[i]=t[i]);c.originalType=e,c[s]="string"==typeof e?e:n,l[1]=c;for(var d=2;d<o;d++)l[d]=a[d];return r.createElement.apply(null,l)}return 
r.createElement.apply(null,a)}k.displayName="MDXCreateElement"},9502:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>c,toc:()=>d});var r=a(7462),n=(a(7294),a(3905));const o={},l=void 0,c={unversionedId:"api/fastkafka/encoder/avro_decoder",id:"api/fastkafka/encoder/avro_decoder",title:"avro_decoder",description:"avrodecoder {fastkafka.encoder.avrodecoder}",source:"@site/docs/api/fastkafka/encoder/avro_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_decoder",permalink:"/docs/next/api/fastkafka/encoder/avro_decoder",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"AvroBase",permalink:"/docs/next/api/fastkafka/encoder/AvroBase"},next:{title:"avro_encoder",permalink:"/docs/next/api/fastkafka/encoder/avro_encoder"}},i={},d=[{value:"avro_decoder",id:"fastkafka.encoder.avro_decoder",level:3}],p={toc:d},s="wrapper";function u(e){let{components:t,...a}=e;return(0,n.kt)(s,(0,r.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("h3",{id:"fastkafka.encoder.avro_decoder"},"avro_decoder"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/encoder/avro.py#L263-L279",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"avro_decoder(\n raw_msg, cls\n)\n")),(0,n.kt)("p",null,"Decoder to decode avro encoded messages to pydantic model 
instance"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Name"),(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"),(0,n.kt)("th",{parentName:"tr",align:null},"Default"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"raw_msg")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"bytes")),(0,n.kt)("td",{parentName:"tr",align:null},"Avro encoded bytes message received from Kafka topic"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))),(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"cls")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"Type[pydantic.main.BaseModel]")),(0,n.kt)("td",{parentName:"tr",align:null},"Pydantic class; This pydantic class will be used to construct instance of same class"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))))),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"Any")),(0,n.kt)("td",{parentName:"tr",align:null},"An instance of given pydantic class")))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8ad68633.50914cab.js b/assets/js/8ad68633.50914cab.js new file mode 100644 index 
0000000..1542179 --- /dev/null +++ b/assets/js/8ad68633.50914cab.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2399],{3905:(e,t,n)=>{n.d(t,{Zo:()=>h,kt:()=>m});var i=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function a(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?r(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,i,o=function(e,t){if(null==e)return{};var n,i,o={},r=Object.keys(e);for(i=0;i<r.length;i++)n=r[i],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i<r.length;i++)n=r[i],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=i.createContext({}),l=function(e){var t=i.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},h=function(e){var t=l(e.components);return i.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},p=i.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,c=e.parentName,h=s(e,["components","mdxType","originalType","parentName"]),d=l(n),p=o,m=d["".concat(c,".").concat(p)]||d[p]||u[p]||r;return n?i.createElement(m,a(a({ref:t},h),{},{components:n})):i.createElement(m,a({ref:t},h))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof 
e||o){var r=n.length,a=new Array(r);a[0]=p;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[d]="string"==typeof e?e:o,a[1]=s;for(var l=2;l<r;l++)a[l]=n[l];return i.createElement.apply(null,a)}return i.createElement.apply(null,n)}p.displayName="MDXCreateElement"},1606:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>a,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var i=n(7462),o=(n(7294),n(3905));const r={},a=void 0,s={unversionedId:"LICENSE",id:"LICENSE",title:"LICENSE",description:"Apache License",source:"@site/docs/LICENSE.md",sourceDirName:".",slug:"/LICENSE",permalink:"/docs/next/LICENSE",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"run_fastkafka_server_process",permalink:"/docs/next/cli/run_fastkafka_server_process"},next:{title:"Contributing to FastKafka",permalink:"/docs/next/CONTRIBUTING"}},c={},l=[],h={toc:l},d="wrapper";function u(e){let{components:t,...n}=e;return(0,o.kt)(d,(0,i.Z)({},h,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Apache License\nVersion 2.0, January 2004\n",(0,o.kt)("a",{parentName:"p",href:"http://www.apache.org/licenses/"},"http://www.apache.org/licenses/")),(0,o.kt)("p",null," TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Definitions."),(0,o.kt)("p",{parentName:"li"},'"License" shall mean the terms and conditions for use, reproduction,\nand distribution as defined by Sections 1 through 9 of this document.'),(0,o.kt)("p",{parentName:"li"},'"Licensor" shall mean the copyright owner or entity authorized by\nthe copyright owner that is granting the License.'),(0,o.kt)("p",{parentName:"li"},'"Legal Entity" shall mean the union of the acting entity and all\nother entities that control, are controlled by, or are under common\ncontrol with that entity. 
For the purposes of this definition,\n"control" means (i) the power, direct or indirect, to cause the\ndirection or management of such entity, whether by contract or\notherwise, or (ii) ownership of fifty percent (50%) or more of the\noutstanding shares, or (iii) beneficial ownership of such entity.'),(0,o.kt)("p",{parentName:"li"},'"You" (or "Your") shall mean an individual or Legal Entity\nexercising permissions granted by this License.'),(0,o.kt)("p",{parentName:"li"},'"Source" form shall mean the preferred form for making modifications,\nincluding but not limited to software source code, documentation\nsource, and configuration files.'),(0,o.kt)("p",{parentName:"li"},'"Object" form shall mean any form resulting from mechanical\ntransformation or translation of a Source form, including but\nnot limited to compiled object code, generated documentation,\nand conversions to other media types.'),(0,o.kt)("p",{parentName:"li"},'"Work" shall mean the work of authorship, whether in Source or\nObject form, made available under the License, as indicated by a\ncopyright notice that is included in or attached to the work\n(an example is provided in the Appendix below).'),(0,o.kt)("p",{parentName:"li"},'"Derivative Works" shall mean any work, whether in Source or Object\nform, that is based on (or derived from) the Work and for which the\neditorial revisions, annotations, elaborations, or other modifications\nrepresent, as a whole, an original work of authorship. 
For the purposes\nof this License, Derivative Works shall not include works that remain\nseparable from, or merely link (or bind by name) to the interfaces of,\nthe Work and Derivative Works thereof.'),(0,o.kt)("p",{parentName:"li"},'"Contribution" shall mean any work of authorship, including\nthe original version of the Work and any modifications or additions\nto that Work or Derivative Works thereof, that is intentionally\nsubmitted to Licensor for inclusion in the Work by the copyright owner\nor by an individual or Legal Entity authorized to submit on behalf of\nthe copyright owner. For the purposes of this definition, "submitted"\nmeans any form of electronic, verbal, or written communication sent\nto the Licensor or its representatives, including but not limited to\ncommunication on electronic mailing lists, source code control systems,\nand issue tracking systems that are managed by, or on behalf of, the\nLicensor for the purpose of discussing and improving the Work, but\nexcluding communication that is conspicuously marked or otherwise\ndesignated in writing by the copyright owner as "Not a Contribution."'),(0,o.kt)("p",{parentName:"li"},'"Contributor" shall mean Licensor and any individual or Legal Entity\non behalf of whom a Contribution has been received by Licensor and\nsubsequently incorporated within the Work.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Copyright License. Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\ncopyright license to reproduce, prepare Derivative Works of,\npublicly display, publicly perform, sublicense, and distribute the\nWork and such Derivative Works in Source or Object form.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Patent License. 
Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\n(except as stated in this section) patent license to make, have made,\nuse, offer to sell, sell, import, and otherwise transfer the Work,\nwhere such license applies only to those patent claims licensable\nby such Contributor that are necessarily infringed by their\nContribution(s) alone or by combination of their Contribution(s)\nwith the Work to which such Contribution(s) was submitted. If You\ninstitute patent litigation against any entity (including a\ncross-claim or counterclaim in a lawsuit) alleging that the Work\nor a Contribution incorporated within the Work constitutes direct\nor contributory patent infringement, then any patent licenses\ngranted to You under this License for that Work shall terminate\nas of the date such litigation is filed.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Redistribution. 
You may reproduce and distribute copies of the\nWork or Derivative Works thereof in any medium, with or without\nmodifications, and in Source or Object form, provided that You\nmeet the following conditions:"),(0,o.kt)("p",{parentName:"li"},"(a) You must give any other recipients of the Work or\nDerivative Works a copy of this License; and"),(0,o.kt)("p",{parentName:"li"},"(b) You must cause any modified files to carry prominent notices\nstating that You changed the files; and"),(0,o.kt)("p",{parentName:"li"},"(c) You must retain, in the Source form of any Derivative Works\nthat You distribute, all copyright, patent, trademark, and\nattribution notices from the Source form of the Work,\nexcluding those notices that do not pertain to any part of\nthe Derivative Works; and"),(0,o.kt)("p",{parentName:"li"},'(d) If the Work includes a "NOTICE" text file as part of its\ndistribution, then any Derivative Works that You distribute must\ninclude a readable copy of the attribution notices contained\nwithin such NOTICE file, excluding those notices that do not\npertain to any part of the Derivative Works, in at least one\nof the following places: within a NOTICE text file distributed\nas part of the Derivative Works; within the Source form or\ndocumentation, if provided along with the Derivative Works; or,\nwithin a display generated by the Derivative Works, if and\nwherever such third-party notices normally appear. The contents\nof the NOTICE file are for informational purposes only and\ndo not modify the License. 
You may add Your own attribution\nnotices within Derivative Works that You distribute, alongside\nor as an addendum to the NOTICE text from the Work, provided\nthat such additional attribution notices cannot be construed\nas modifying the License.'),(0,o.kt)("p",{parentName:"li"},"You may add Your own copyright statement to Your modifications and\nmay provide additional or different license terms and conditions\nfor use, reproduction, or distribution of Your modifications, or\nfor any such Derivative Works as a whole, provided Your use,\nreproduction, and distribution of the Work otherwise complies with\nthe conditions stated in this License.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Submission of Contributions. Unless You explicitly state otherwise,\nany Contribution intentionally submitted for inclusion in the Work\nby You to the Licensor shall be under the terms and conditions of\nthis License, without any additional terms or conditions.\nNotwithstanding the above, nothing herein shall supersede or modify\nthe terms of any separate license agreement you may have executed\nwith Licensor regarding such Contributions.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Trademarks. This License does not grant permission to use the trade\nnames, trademarks, service marks, or product names of the Licensor,\nexcept as required for reasonable and customary use in describing the\norigin of the Work and reproducing the content of the NOTICE file.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},'Disclaimer of Warranty. Unless required by applicable law or\nagreed to in writing, Licensor provides the Work (and each\nContributor provides its Contributions) on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\nimplied, including, without limitation, any warranties or conditions\nof TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\nPARTICULAR PURPOSE. 
You are solely responsible for determining the\nappropriateness of using or redistributing the Work and assume any\nrisks associated with Your exercise of permissions under this License.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Limitation of Liability. In no event and under no legal theory,\nwhether in tort (including negligence), contract, or otherwise,\nunless required by applicable law (such as deliberate and grossly\nnegligent acts) or agreed to in writing, shall any Contributor be\nliable to You for damages, including any direct, indirect, special,\nincidental, or consequential damages of any character arising as a\nresult of this License or out of the use or inability to use the\nWork (including but not limited to damages for loss of goodwill,\nwork stoppage, computer failure or malfunction, or any and all\nother commercial damages or losses), even if such Contributor\nhas been advised of the possibility of such damages.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Accepting Warranty or Additional Liability. While redistributing\nthe Work or Derivative Works thereof, You may choose to offer,\nand charge a fee for, acceptance of support, warranty, indemnity,\nor other liability obligations and/or rights consistent with this\nLicense. 
However, in accepting such obligations, You may act only\non Your own behalf and on Your sole responsibility, not on behalf\nof any other Contributor, and only if You agree to indemnify,\ndefend, and hold each Contributor harmless for any liability\nincurred by, or claims asserted against, such Contributor by reason\nof your accepting any such warranty or additional liability."),(0,o.kt)("p",{parentName:"li"},"END OF TERMS AND CONDITIONS"),(0,o.kt)("p",{parentName:"li"},"APPENDIX: How to apply the Apache License to your work."),(0,o.kt)("p",{parentName:"li"},' To apply the Apache License to your work, attach the following\nboilerplate notice, with the fields enclosed by brackets "[]"\nreplaced with your own identifying information. (Don\'t include\nthe brackets!) The text should be enclosed in the appropriate\ncomment syntax for the file format. We also recommend that a\nfile or class name and description of purpose be included on the\nsame "printed page" as the copyright notice for easier\nidentification within third-party archives.'),(0,o.kt)("p",{parentName:"li"},"Copyright ","[yyyy][name of copyright owner]"),(0,o.kt)("p",{parentName:"li"},'Licensed under the Apache License, Version 2.0 (the "License");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at'),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre"},"http://www.apache.org/licenses/LICENSE-2.0\n")),(0,o.kt)("p",{parentName:"li"},'Unless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.'))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8c27608b.c4322928.js b/assets/js/8c27608b.c4322928.js new file mode 100644 index 0000000..c12ad5e --- /dev/null +++ 
b/assets/js/8c27608b.c4322928.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8851],{3905:(a,e,n)=>{n.d(e,{Zo:()=>k,kt:()=>m});var t=n(7294);function i(a,e,n){return e in a?Object.defineProperty(a,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):a[e]=n,a}function o(a,e){var n=Object.keys(a);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(a);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),n.push.apply(n,t)}return n}function r(a){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?o(Object(n),!0).forEach((function(e){i(a,e,n[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(n,e))}))}return a}function s(a,e){if(null==a)return{};var n,t,i=function(a,e){if(null==a)return{};var n,t,i={},o=Object.keys(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||(i[n]=a[n]);return i}(a,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(a,n)&&(i[n]=a[n])}return i}var p=t.createContext({}),l=function(a){var e=t.useContext(p),n=e;return a&&(n="function"==typeof a?a(e):r(r({},e),a)),n},k=function(a){var e=l(a.components);return t.createElement(p.Provider,{value:e},a.children)},c="mdxType",d={inlineCode:"code",wrapper:function(a){var e=a.children;return t.createElement(t.Fragment,{},e)}},f=t.forwardRef((function(a,e){var n=a.components,i=a.mdxType,o=a.originalType,p=a.parentName,k=s(a,["components","mdxType","originalType","parentName"]),c=l(n),f=i,m=c["".concat(p,".").concat(f)]||c[f]||d[f]||o;return n?t.createElement(m,r(r({ref:e},k),{},{components:n})):t.createElement(m,r({ref:e},k))}));function m(a,e){var n=arguments,i=e&&e.mdxType;if("string"==typeof a||i){var o=n.length,r=new 
Array(o);r[0]=f;var s={};for(var p in e)hasOwnProperty.call(e,p)&&(s[p]=e[p]);s.originalType=a,s[c]="string"==typeof a?a:i,r[1]=s;for(var l=2;l<o;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},1457:(a,e,n)=>{n.r(e),n.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var t=n(7462),i=(n(7294),n(3905));const o={},r="Benchmarking FastKafka app",s={unversionedId:"guides/Guide_06_Benchmarking_FastKafka",id:"version-0.8.0/guides/Guide_06_Benchmarking_FastKafka",title:"Benchmarking FastKafka app",description:"Prerequisites",source:"@site/versioned_docs/version-0.8.0/guides/Guide_06_Benchmarking_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_06_Benchmarking_FastKafka",permalink:"/docs/guides/Guide_06_Benchmarking_FastKafka",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using FastAPI to Run FastKafka Application",permalink:"/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application"},next:{title:"EventMetadata",permalink:"/docs/api/fastkafka/EventMetadata"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Starting Kafka",id:"starting-kafka",level:3},{value:"Installing Java and Kafka",id:"installing-java-and-kafka",level:4},{value:"Creating configuration for Zookeeper and Kafka",id:"creating-configuration-for-zookeeper-and-kafka",level:4},{value:"Starting Zookeeper and Kafka",id:"starting-zookeeper-and-kafka",level:4},{value:"Creating topics in Kafka",id:"creating-topics-in-kafka",level:4},{value:"Populating topics with dummy data",id:"populating-topics-with-dummy-data",level:4},{value:"Benchmarking FastKafka",id:"benchmarking-fastkafka",level:3}],k={toc:l},c="wrapper";function 
d(a){let{components:e,...n}=a;return(0,i.kt)(c,(0,t.Z)({},k,n,{components:e,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"benchmarking-fastkafka-app"},"Benchmarking FastKafka app"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"To benchmark a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nproject, you will need the following:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library built with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"."),(0,i.kt)("li",{parentName:"ol"},"A running ",(0,i.kt)("inlineCode",{parentName:"li"},"Kafka")," instance to benchmark the FastKafka application\nagainst.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = 
Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nhas a decorator for benchmarking which is appropriately called as\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark"),". 
Let\u2019s edit our ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and add the\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," decorator to the consumes method."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\n@kafka_app.benchmark(interval=1, 
sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"Here we are conducting a benchmark of a function that consumes data from\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic with an interval of 1 second and a sliding window\nsize of 5."),(0,i.kt)("p",null,"This ",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," method uses the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," parameter to calculate the\nresults over a specific time period, and the ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size"),"\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation."),(0,i.kt)("p",null,"This benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement."),(0,i.kt)("h3",{id:"starting-kafka"},"Starting Kafka"),(0,i.kt)("p",null,"If you already have a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," running somewhere, then you can skip this\nstep."),(0,i.kt)("p",null,"Please keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself."),(0,i.kt)("h4",{id:"installing-java-and-kafka"},"Installing Java and Kafka"),(0,i.kt)("p",null,"We need a working ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),"instance to benchmark 
our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, and to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," we need ",(0,i.kt)("inlineCode",{parentName:"p"},"Java"),". Thankfully,\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\ncomes with a CLI to install both ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," on our machine."),(0,i.kt)("p",null,"So, let\u2019s install ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," by executing the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka testing install_deps\n")),(0,i.kt)("p",null,"The above command will extract ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),' scripts at the location\n\u201c\\$HOME/.local/kafka_2.13-3.3.2" on your machine.'),(0,i.kt)("h4",{id:"creating-configuration-for-zookeeper-and-kafka"},"Creating configuration for Zookeeper and Kafka"),(0,i.kt)("p",null,"Now we need to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," separately, and to start\nthem we need ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," files."),(0,i.kt)("p",null,"Let\u2019s create a folder inside the folder where ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," scripts were\nextracted and change directory into it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n")),(0,i.kt)("p",null,"Let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and write 
the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n")),(0,i.kt)("p",null,"Similarly, let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"broker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n")),(0,i.kt)("h4",{id:"starting-zookeeper-and-kafka"},"Starting Zookeeper and Kafka"),(0,i.kt)("p",null,"We need two different terminals to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," in one and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," in\nanother. 
Let\u2019s open a new terminal and run the following commands to\nstart ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n")),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," is up and running, open a new terminal and execute the\nfollwing commands to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n")),(0,i.kt)("p",null,"Now we have both ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," up and running."),(0,i.kt)("h4",{id:"creating-topics-in-kafka"},"Creating topics in Kafka"),(0,i.kt)("p",null,"In a new terminal, please execute the following command to create\nnecessary topics in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n")),(0,i.kt)("h4",{id:"populating-topics-with-dummy-data"},"Populating topics with dummy data"),(0,i.kt)("p",null,"To benchmark our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, we need some data in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," topics."),(0,i.kt)("p",null,"In the same terminal, let\u2019s create some dummy 
data:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},'yes \'{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}\' | head -n 1000000 > /tmp/test_data\n')),(0,i.kt)("p",null,"This command will create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"test_data")," in the ",(0,i.kt)("inlineCode",{parentName:"p"},"tmp")," folder\nwith one million rows of text. This will act as dummy data to populate\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic."),(0,i.kt)("p",null,"Let\u2019s populate the created topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," with the dummy data which\nwe created above:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n")),(0,i.kt)("p",null,"Now our topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again."),(0,i.kt)("h3",{id:"benchmarking-fastkafka"},"Benchmarking FastKafka"),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," are ready, benchmarking\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is as simple as running the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n")),(0,i.kt)("p",null,"This command will start the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp and begin consuming messages from ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),", which we spun up earlier.\nAdditionally, the same command will output all of the benchmark\nthroughputs based on the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size")," values."),(0,i.kt)("p",null,"The output for the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 23-04-07 
10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 
10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n")),(0,i.kt)("p",null,"Based on the output, when using 1 worker, our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp achieved a ",(0,i.kt)("inlineCode",{parentName:"p"},"throughput")," of 93k messages per 
second and an\n",(0,i.kt)("inlineCode",{parentName:"p"},"average throughput")," of 93k messages per second."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8d193b98.faf1a7fe.js b/assets/js/8d193b98.faf1a7fe.js new file mode 100644 index 0000000..d80fa3e --- /dev/null +++ b/assets/js/8d193b98.faf1a7fe.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7505],{3905:(e,t,a)=>{a.d(t,{Zo:()=>p,kt:()=>m});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),u=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},h="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var 
a=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),h=u(a),c=o,m=h["".concat(s,".").concat(c)]||h[c]||d[c]||i;return a?n.createElement(m,r(r({ref:t},p),{},{components:a})):n.createElement(m,r({ref:t},p))}));function m(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=a.length,r=new Array(i);r[0]=c;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[h]="string"==typeof e?e:o,r[1]=l;for(var u=2;u<i;u++)r[u]=a[u];return n.createElement.apply(null,r)}return n.createElement.apply(null,a)}c.displayName="MDXCreateElement"},676:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var n=a(7462),o=(a(7294),a(3905));const i={},r="Contributing to FastKafka",l={unversionedId:"CONTRIBUTING",id:"CONTRIBUTING",title:"Contributing to FastKafka",description:"First off, thanks for taking the time to contribute! \u2764\ufe0f",source:"@site/docs/CONTRIBUTING.md",sourceDirName:".",slug:"/CONTRIBUTING",permalink:"/docs/next/CONTRIBUTING",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LICENSE",permalink:"/docs/next/LICENSE"},next:{title:"Release notes",permalink:"/docs/next/CHANGELOG"}},s={},u=[{value:"Table of Contents",id:"table-of-contents",level:2},{value:"I Have a Question",id:"i-have-a-question",level:2},{value:"I Want To Contribute",id:"i-want-to-contribute",level:2},{value:"Reporting Bugs",id:"reporting-bugs",level:3},{value:"Before Submitting a Bug Report",id:"before-submitting-a-bug-report",level:4},{value:"How Do I Submit a Good Bug Report?",id:"how-do-i-submit-a-good-bug-report",level:4},{value:"Suggesting Enhancements",id:"suggesting-enhancements",level:3},{value:"Before Submitting an Enhancement",id:"before-submitting-an-enhancement",level:4},{value:"How Do I Submit a Good Enhancement 
Suggestion?",id:"how-do-i-submit-a-good-enhancement-suggestion",level:4},{value:"Your First Code Contribution",id:"your-first-code-contribution",level:3},{value:"Development",id:"development",level:2},{value:"Prepare the dev environment",id:"prepare-the-dev-environment",level:3},{value:"Clone the FastKafka repository",id:"clone-the-fastkafka-repository",level:4},{value:"Optional: create a virtual python environment",id:"optional-create-a-virtual-python-environment",level:4},{value:"Install FastKafka",id:"install-fastkafka",level:4},{value:"Install JRE and Kafka toolkit",id:"install-jre-and-kafka-toolkit",level:4},{value:"Install npm",id:"install-npm",level:4},{value:"Install docusaurus",id:"install-docusaurus",level:4},{value:"Check if everything works",id:"check-if-everything-works",level:4},{value:"Way of working",id:"way-of-working",level:3},{value:"Before a PR",id:"before-a-pr",level:3},{value:"Attribution",id:"attribution",level:2}],p={toc:u},h="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(h,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"contributing-to-fastkafka"},"Contributing to FastKafka"),(0,o.kt)("p",null,"First off, thanks for taking the time to contribute! \u2764\ufe0f"),(0,o.kt)("p",null,"All types of contributions are encouraged and valued. See the ",(0,o.kt)("a",{parentName:"p",href:"#table-of-contents"},"Table of Contents")," for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. \ud83c\udf89"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"And if you like the project, but just don't have time to contribute, that's fine. 
There are other easy ways to support the project and show your appreciation, which we would also be very happy about:"),(0,o.kt)("ul",{parentName:"blockquote"},(0,o.kt)("li",{parentName:"ul"},"Star the project"),(0,o.kt)("li",{parentName:"ul"},"Tweet about it"),(0,o.kt)("li",{parentName:"ul"},"Refer this project in your project's readme"),(0,o.kt)("li",{parentName:"ul"},"Mention the project at local meetups and tell your friends/colleagues"))),(0,o.kt)("h2",{id:"table-of-contents"},"Table of Contents"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"I Have a Question")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#reporting-bugs"},"Reporting Bugs")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#suggesting-enhancements"},"Suggesting Enhancements")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#your-first-code-contribution"},"Your First Code Contribution")))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#development"},"Development"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#prepare-the-dev-environment"},"Prepare the dev environment")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#way-of-working"},"Way of working")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#before-a-pr"},"Before a PR"))))),(0,o.kt)("h2",{id:"i-have-a-question"},"I Have a Question"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"If you want to ask a question, we assume that you have read the available ",(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/docs"},"Documentation"),".")),(0,o.kt)("p",null,"Before you ask a question, it is best to search for existing 
",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"Issues")," that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue."),(0,o.kt)("p",null,"If you then still feel the need to ask a question and need clarification, we recommend the following:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord")),(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Provide as much context as you can about what you're running into")))),(0,o.kt)("p",null,"We will then take care of the issue as soon as possible."),(0,o.kt)("h2",{id:"i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("blockquote",null,(0,o.kt)("h3",{parentName:"blockquote",id:"legal-notice"},"Legal Notice"),(0,o.kt)("p",{parentName:"blockquote"},"When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.")),(0,o.kt)("h3",{id:"reporting-bugs"},"Reporting Bugs"),(0,o.kt)("h4",{id:"before-submitting-a-bug-report"},"Before Submitting a Bug Report"),(0,o.kt)("p",null,"A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Determine if your bug is really a bug and not an error on your side e.g. 
using incompatible environment components/versions (Make sure that you have read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation"),". If you are looking for support, you might want to check ",(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"this section"),")."),(0,o.kt)("li",{parentName:"ul"},"To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues?q=label%3Abug"},"bug tracker"),"."),(0,o.kt)("li",{parentName:"ul"},"Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue."),(0,o.kt)("li",{parentName:"ul"},"Collect information about the bug:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Stack trace (Traceback)"),(0,o.kt)("li",{parentName:"ul"},"OS, Platform and Version (Windows, Linux, macOS, x86, ARM)"),(0,o.kt)("li",{parentName:"ul"},"Python version"),(0,o.kt)("li",{parentName:"ul"},"Possibly your input and the output"),(0,o.kt)("li",{parentName:"ul"},"Can you reliably reproduce the issue? And can you also reproduce it with older versions?")))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-bug-report"},"How Do I Submit a Good Bug Report?"),(0,o.kt)("p",null,"We use GitHub issues to track bugs and errors. If you run into an issue with the project:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),". 
(Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)"),(0,o.kt)("li",{parentName:"ul"},"Explain the behavior you would expect and the actual behavior."),(0,o.kt)("li",{parentName:"ul"},"Please provide as much context as possible and describe the ",(0,o.kt)("em",{parentName:"li"},"reproduction steps")," that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case."),(0,o.kt)("li",{parentName:"ul"},"Provide the information you collected in the previous section.")),(0,o.kt)("p",null,"Once it's filed:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"The project team will label the issue accordingly."),(0,o.kt)("li",{parentName:"ul"},"A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro"),". Bugs with the ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro")," tag will not be addressed until they are reproduced."),(0,o.kt)("li",{parentName:"ul"},"If the team is able to reproduce the issue, it will be marked ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-fix"),", as well as possibly other tags (such as ",(0,o.kt)("inlineCode",{parentName:"li"},"critical"),"), and the issue will be left to be implemented.")),(0,o.kt)("h3",{id:"suggesting-enhancements"},"Suggesting Enhancements"),(0,o.kt)("p",null,"This section guides you through submitting an enhancement suggestion for FastKafka, ",(0,o.kt)("strong",{parentName:"p"},"including completely new features and minor improvements to existing functionality"),". 
Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions."),(0,o.kt)("h4",{id:"before-submitting-an-enhancement"},"Before Submitting an Enhancement"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation")," carefully and find out if the functionality is already covered, maybe by an individual configuration."),(0,o.kt)("li",{parentName:"ul"},"Perform a ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues"},"search")," to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one."),(0,o.kt)("li",{parentName:"ul"},"Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library."),(0,o.kt)("li",{parentName:"ul"},"If you are not sure or would like to discuiss the enhancement with us directly, you can always contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord"))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-enhancement-suggestion"},"How Do I Submit a Good Enhancement Suggestion?"),(0,o.kt)("p",null,"Enhancement suggestions are tracked as ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"GitHub issues"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Use a ",(0,o.kt)("strong",{parentName:"li"},"clear and descriptive title")," for the issue to identify the suggestion."),(0,o.kt)("li",{parentName:"ul"},"Provide a ",(0,o.kt)("strong",{parentName:"li"},"step-by-step description of the suggested enhancement")," in as many details as possible."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Describe the current behavior")," and ",(0,o.kt)("strong",{parentName:"li"},"explain which behavior you expected to see instead")," and why. At this point you can also tell which alternatives do not work for you."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Explain why this enhancement would be useful")," to most FastKafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.")),(0,o.kt)("h3",{id:"your-first-code-contribution"},"Your First Code Contribution"),(0,o.kt)("p",null,'A great way to start contributing to FastKafka would be by solving an issue tagged with "good first issue". 
To find a list of issues that are tagged as "good first issue" and are suitable for newcomers, please visit the following link: ',(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/labels/good%20first%20issue"},"Good first issues")),(0,o.kt)("p",null,"These issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in ",(0,o.kt)("a",{parentName:"p",href:"#way-of-working"},"Way of working")," and ",(0,o.kt)("a",{parentName:"p",href:"#before-a-pr"},"Before a PR"),", and help us make FastKafka even better!"),(0,o.kt)("p",null,"If you have any questions or need further assistance, feel free to reach out to us. Happy coding!"),(0,o.kt)("h2",{id:"development"},"Development"),(0,o.kt)("h3",{id:"prepare-the-dev-environment"},"Prepare the dev environment"),(0,o.kt)("p",null,"To start contributing to FastKafka, you first have to prepare the development environment."),(0,o.kt)("h4",{id:"clone-the-fastkafka-repository"},"Clone the FastKafka repository"),(0,o.kt)("p",null,"To clone the repository, run the following command in the CLI:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"git clone https://github.com/airtai/fastkafka.git\n")),(0,o.kt)("h4",{id:"optional-create-a-virtual-python-environment"},"Optional: create a virtual python environment"),(0,o.kt)("p",null,"To prevent library version clashes with you other projects, it is reccomended that you create a virtual python environment for your FastKafka project by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"python3 -m venv fastkafka-env\n")),(0,o.kt)("p",null,"And to activate your virtual environment run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"source fastkafka-env/bin/activate\n")),(0,o.kt)("p",null,"To learn more about virtual environments, please 
have a look at ",(0,o.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available."},"official python documentation")),(0,o.kt)("h4",{id:"install-fastkafka"},"Install FastKafka"),(0,o.kt)("p",null,"To install FastKafka, navigate to the root directory of the cloned FastKafka project and run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'pip install fastkafka -e [."dev"]\n')),(0,o.kt)("h4",{id:"install-jre-and-kafka-toolkit"},"Install JRE and Kafka toolkit"),(0,o.kt)("p",null,"To be able to run tests and use all the functionalities of FastKafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka testing install-deps")," CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install JRE and Kafka manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.oracle.com/javase/9/install/toc.htm"},"JDK and JRE installation guide")," and ",(0,o.kt)("a",{parentName:"li",href:"https://kafka.apache.org/quickstart"},"Apache Kafka quickstart"))),(0,o.kt)("h4",{id:"install-npm"},"Install npm"),(0,o.kt)("p",null,"To be able to run tests you must have npm installed, because of documentation generation. 
To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka docs install_deps")," CLI command which will install npm for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install npm manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.npmjs.com/downloading-and-installing-node-js-and-npm"},"NPM installation guide"))),(0,o.kt)("h4",{id:"install-docusaurus"},"Install docusaurus"),(0,o.kt)("p",null,"To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of FastKafka project."),(0,o.kt)("h4",{id:"check-if-everything-works"},"Check if everything works"),(0,o.kt)("p",null,"After installing FastKafka and all the necessary dependencies, run ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev_test"),' in the root of FastKafka project. This will take a couple of minutes as it will run all the tests on FastKafka project. If everythng is setup correctly, you will get a "Success." message in your terminal, otherwise please refer to previous steps.'),(0,o.kt)("h3",{id:"way-of-working"},"Way of working"),(0,o.kt)("p",null,"The development of FastKafka is done in Jupyter notebooks. 
Inside the ",(0,o.kt)("inlineCode",{parentName:"p"},"nbs")," directory you will find all the source code of FastKafka, this is where you will implement your changes."),(0,o.kt)("p",null,"The testing, cleanup and exporting of the code is being handled by ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev"),", please, before starting the work on FastKafka, get familiar with it by reading ",(0,o.kt)("a",{parentName:"p",href:"https://nbdev.fast.ai/getting_started.html"},"nbdev documentation"),"."),(0,o.kt)("p",null,"The general philosopy you should follow when writing code for FastKafka is:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Function should be an atomic functionality, short and concise",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Good rule of thumb: your function should be 5-10 lines long usually"))),(0,o.kt)("li",{parentName:"ul"},"If there are more than 2 params, enforce keywording using *",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"E.g.: ",(0,o.kt)("inlineCode",{parentName:"li"},"def function(param1, *, param2, param3): ...")))),(0,o.kt)("li",{parentName:"ul"},"Define typing of arguments and return value",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected"))),(0,o.kt)("li",{parentName:"ul"},"After the function cell, write test cells using the assert keyword",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Whenever you implement something you should test that functionality immediately in the cells below "))),(0,o.kt)("li",{parentName:"ul"},"Add Google style python docstrings when function is implemented and tested")),(0,o.kt)("h3",{id:"before-a-pr"},"Before a PR"),(0,o.kt)("p",null,"After you have implemented your changes you will want to open a pull request to merge those changes into our main branch. 
To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of FastKafka project):"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Format your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbqa black nbs")),(0,o.kt)("li",{parentName:"ol"},"Close, shutdown, and clean the metadata from your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_clean")),(0,o.kt)("li",{parentName:"ol"},"Export your code: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_export")),(0,o.kt)("li",{parentName:"ol"},"Run the tests: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_test")),(0,o.kt)("li",{parentName:"ol"},"Test code typing: ",(0,o.kt)("inlineCode",{parentName:"li"},"mypy fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with bandit: ",(0,o.kt)("inlineCode",{parentName:"li"},"bandit -r fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with semgrep: ",(0,o.kt)("inlineCode",{parentName:"li"},"semgrep --config auto -r fastkafka"))),(0,o.kt)("p",null,"When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everythng is in order, we will approve your merge."),(0,o.kt)("h2",{id:"attribution"},"Attribution"),(0,o.kt)("p",null,"This guide is based on the ",(0,o.kt)("strong",{parentName:"p"},"contributing-gen"),". 
",(0,o.kt)("a",{parentName:"p",href:"https://github.com/bttger/contributing-gen"},"Make your own"),"!"))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8ff5d7ba.c28181ba.js b/assets/js/8ff5d7ba.c28181ba.js new file mode 100644 index 0000000..26dd9cd --- /dev/null +++ b/assets/js/8ff5d7ba.c28181ba.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7795],{3905:(t,a,e)=>{e.d(a,{Zo:()=>u,kt:()=>c});var i=e(7294);function r(t,a,e){return a in t?Object.defineProperty(t,a,{value:e,enumerable:!0,configurable:!0,writable:!0}):t[a]=e,t}function n(t,a){var e=Object.keys(t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(t);a&&(i=i.filter((function(a){return Object.getOwnPropertyDescriptor(t,a).enumerable}))),e.push.apply(e,i)}return e}function s(t){for(var a=1;a<arguments.length;a++){var e=null!=arguments[a]?arguments[a]:{};a%2?n(Object(e),!0).forEach((function(a){r(t,a,e[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(e)):n(Object(e)).forEach((function(a){Object.defineProperty(t,a,Object.getOwnPropertyDescriptor(e,a))}))}return t}function p(t,a){if(null==t)return{};var e,i,r=function(t,a){if(null==t)return{};var e,i,r={},n=Object.keys(t);for(i=0;i<n.length;i++)e=n[i],a.indexOf(e)>=0||(r[e]=t[e]);return r}(t,a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);for(i=0;i<n.length;i++)e=n[i],a.indexOf(e)>=0||Object.prototype.propertyIsEnumerable.call(t,e)&&(r[e]=t[e])}return r}var l=i.createContext({}),k=function(t){var a=i.useContext(l),e=a;return t&&(e="function"==typeof t?t(a):s(s({},a),t)),e},u=function(t){var a=k(t.components);return i.createElement(l.Provider,{value:a},t.children)},m="mdxType",o={inlineCode:"code",wrapper:function(t){var a=t.children;return i.createElement(i.Fragment,{},a)}},h=i.forwardRef((function(t,a){var 
e=t.components,r=t.mdxType,n=t.originalType,l=t.parentName,u=p(t,["components","mdxType","originalType","parentName"]),m=k(e),h=r,c=m["".concat(l,".").concat(h)]||m[h]||o[h]||n;return e?i.createElement(c,s(s({ref:a},u),{},{components:e})):i.createElement(c,s({ref:a},u))}));function c(t,a){var e=arguments,r=a&&a.mdxType;if("string"==typeof t||r){var n=e.length,s=new Array(n);s[0]=h;var p={};for(var l in a)hasOwnProperty.call(a,l)&&(p[l]=a[l]);p.originalType=t,p[m]="string"==typeof t?t:r,s[1]=p;for(var k=2;k<n;k++)s[k]=e[k];return i.createElement.apply(null,s)}return i.createElement.apply(null,e)}h.displayName="MDXCreateElement"},1082:(t,a,e)=>{e.r(a),e.d(a,{assets:()=>l,contentTitle:()=>s,default:()=>o,frontMatter:()=>n,metadata:()=>p,toc:()=>k});var i=e(7462),r=(e(7294),e(3905));const n={},s="Release notes",p={unversionedId:"CHANGELOG",id:"version-0.8.0/CHANGELOG",title:"Release notes",description:"0.7.0",source:"@site/versioned_docs/version-0.8.0/CHANGELOG.md",sourceDirName:".",slug:"/CHANGELOG",permalink:"/docs/CHANGELOG",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Contributing to FastKafka",permalink:"/docs/CONTRIBUTING"}},l={},k=[{value:"0.7.0",id:"070",level:2},{value:"New Features",id:"new-features",level:3},{value:"Bugs Squashed",id:"bugs-squashed",level:3},{value:"0.6.0",id:"060",level:2},{value:"New Features",id:"new-features-1",level:3},{value:"Bugs Squashed",id:"bugs-squashed-1",level:3},{value:"0.5.0",id:"050",level:2},{value:"New Features",id:"new-features-2",level:3},{value:"Bugs Squashed",id:"bugs-squashed-2",level:3},{value:"0.4.0",id:"040",level:2},{value:"New Features",id:"new-features-3",level:3},{value:"0.3.1",id:"031",level:2},{value:"0.3.0",id:"030",level:2},{value:"New Features",id:"new-features-4",level:3},{value:"Bugs Squashed",id:"bugs-squashed-3",level:3},{value:"0.2.3",id:"023",level:2},{value:"0.2.2",id:"022",level:2},{value:"New Features",id:"new-features-5",level:3},{value:"Bugs 
Squashed",id:"bugs-squashed-4",level:3},{value:"0.2.0",id:"020",level:2},{value:"New Features",id:"new-features-6",level:3},{value:"Bugs Squashed",id:"bugs-squashed-5",level:3},{value:"0.1.3",id:"013",level:2},{value:"0.1.2",id:"012",level:2},{value:"New Features",id:"new-features-7",level:3},{value:"Bugs Squashed",id:"bugs-squashed-6",level:3},{value:"0.1.1",id:"011",level:2},{value:"Bugs Squashed",id:"bugs-squashed-7",level:3},{value:"0.1.0",id:"010",level:2}],u={toc:k},m="wrapper";function o(t){let{components:a,...e}=t;return(0,r.kt)(m,(0,i.Z)({},u,e,{components:a,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"release-notes"},"Release notes"),(0,r.kt)("h2",{id:"070"},"0.7.0"),(0,r.kt)("h3",{id:"new-features"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optional description argument to consumes and produces decorator implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/338"},"#338"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Consumes and produces decorators now have optional ",(0,r.kt)("inlineCode",{parentName:"li"},"description")," argument that is used instead of function docstring in async doc generation when specified"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"FastKafka Windows OS support enabled (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/326"},"#326"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"FastKafka can now run on Windows"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"FastKafka and FastAPI integration implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/304"},"#304"),"), thanks to 
",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"FastKafka can now be run alongside FastAPI"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Batch consuming option to consumers implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/298"},"#298"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Consumers can consume events in batches by specifying msg type of consuming function as ",(0,r.kt)("inlineCode",{parentName:"li"},"List[YourMsgType]")," "))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed support for synchronous produce functions (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/295"},"#295"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Added default broker values and update docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/292"},"#292"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("h3",{id:"bugs-squashed"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix index.ipynb to be runnable in colab (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/342"},"#342"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Use cli option root_path docs generate and serve CLI commands (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/341"},"#341"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix incorrect 
asyncapi docs path on fastkafka docs serve command (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/335"},"#335"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Serve docs now takes app ",(0,r.kt)("inlineCode",{parentName:"li"},"root_path")," argument into consideration when specified in app"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/315"},"#315"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix logs printing timestamps (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/308"},"#308"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix topics with dots causing failure of tester instantiation (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/306"},"#306"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},'Specified topics can now have "." 
in their names')))),(0,r.kt)("h2",{id:"060"},"0.6.0"),(0,r.kt)("h3",{id:"new-features-1"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Timestamps added to CLI commands (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/283"},"#283"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Added option to process messages concurrently (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/278"},"#278"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"A new ",(0,r.kt)("inlineCode",{parentName:"li"},"executor")," option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add consumes and produces functions to app (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/274"},"#274"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add batching for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/273"},"#273"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(batch): batch support is a real need! and i see it on the issue list.... 
so hope we do not need to wait too long"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken links in guides (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/272"},"#272"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate the docusaurus sidebar dynamically by parsing summary.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/270"},"#270"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Metadata passed to consumer (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/269"},"#269"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(key): read the key value somehow..Maybe I missed something in the docs\nrequirement(header): read header values, Reason: I use CDC | Debezium and in the current system the header values are important to differentiate between the CRUD operations."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Contribution with instructions how to build and test added 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/255"},"#255"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Export encoders, decoders from fastkafka.encoder (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/246"},"#246"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/239"},"#239"),")")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"UI Improvement: Post screenshots with links to the actual messages in testimonials section (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/228"},"#228"),")")),(0,r.kt)("h3",{id:"bugs-squashed-1"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Batch testing fix (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/280"},"#280"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Tester breaks when using Batching or KafkaEvent producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/279"},"#279"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Consumer loop callbacks are not executing in parallel (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/276"},"#276"),")"))),(0,r.kt)("h2",{id:"050"},"0.5.0"),(0,r.kt)("h3",{id:"new-features-2"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Significant speedup of Kafka producer 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/236"},"#236"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Added support for AVRO encoding/decoding (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/231"},"#231"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("h3",{id:"bugs-squashed-2"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed sidebar to include guides in docusaurus documentation (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/238"},"#238"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed link to symbols in docusaurus docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/227"},"#227"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed bootstrap servers from constructor (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/220"},"#220"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")))),(0,r.kt)("h2",{id:"040"},"0.4.0"),(0,r.kt)("h3",{id:"new-features-3"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Integrate FastKafka chat (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/208"},"#208"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add benchmarking 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/206"},"#206"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Enable fast testing without running kafka locally (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/198"},"#198"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate docs using Docusaurus (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/194"},"#194"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add test cases for LocalRedpandaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/189"},"#189"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Reimplement patch and delegates from fastcore (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/188"},"#188"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Rename existing functions into start and stop and add lifespan handler 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/117"},"#117"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"},"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"))))),(0,r.kt)("h2",{id:"031"},"0.3.1"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"README.md file updated")),(0,r.kt)("h2",{id:"030"},"0.3.0"),(0,r.kt)("h3",{id:"new-features-4"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Guide for FastKafka produces using partition key (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/172"},"#172"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #161"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add support for Redpanda for testing and deployment (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/181"},"#181"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Remove bootstrap_servers from ",(0,r.kt)("strong",{parentName:"p"},"init")," and use the name of broker as an option when running/testing (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/134"},"#134"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add a GH action file to check for broken links in the docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/163"},"#163"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optimize requirements for testing and docs 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/151"},"#151"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Break requirements into base and optional for testing and dev (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/124"},"#124"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Minimize base requirements needed just for running the service."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add link to example git repo into guide for building docs using actions (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/81"},"#81"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add logging for run_in_background (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/46"},"#46"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement partition Key mechanism for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/16"},"#16"),")"))),(0,r.kt)("h3",{id:"bugs-squashed-3"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement checks for npm installation and version (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/176"},"#176"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #158 by checking if the npx is installed and more verbose error handling"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix the helper.py link in CHANGELOG.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/165"},"#165"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka docs install_deps fails 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/157"},"#157"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Unexpected internal error: ","[Errno 2]"," No such file or directory: 'npx'"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Broken links in docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/141"},"#141"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka run is not showing up in CLI docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/132"},"#132"),")"))),(0,r.kt)("h2",{id:"023"},"0.2.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fixed broken links on PyPi index page")),(0,r.kt)("h2",{id:"022"},"0.2.2"),(0,r.kt)("h3",{id:"new-features-5"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Extract JDK and Kafka installation out of LocalKafkaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/131"},"#131"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"PyYAML version relaxed (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/119"},"#119"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Replace docker based kafka with local (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/68"},"#68"),")"),(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace docker compose with a simple docker run (standard run_jupyter.sh should 
do)"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace all tests to use LocalKafkaBroker"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","update documentation")))),(0,r.kt)("h3",{id:"bugs-squashed-4"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken link for FastKafka docs in index notebook (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/145"},"#145"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix encoding issues when loading setup.py on windows OS (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/135"},"#135"),")"))),(0,r.kt)("h2",{id:"020"},"0.2.0"),(0,r.kt)("h3",{id:"new-features-6"},"New Features"),(0,r.kt)("ul",{className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul"},"Replace kafka container with LocalKafkaBroker (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/112"},"#112"),")",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Replace kafka container with LocalKafkaBroker in tests"))))),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Remove kafka container from tests environment"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Fix failing tests")),(0,r.kt)("h3",{id:"bugs-squashed-5"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fix random failing in CI 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/109"},"#109"),")")),(0,r.kt)("h2",{id:"013"},"0.1.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"version update in ",(0,r.kt)("strong",{parentName:"li"},"init"),".py")),(0,r.kt)("h2",{id:"012"},"0.1.2"),(0,r.kt)("h3",{id:"new-features-7"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Git workflow action for publishing Kafka docs (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/78"},"#78"),")")),(0,r.kt)("h3",{id:"bugs-squashed-6"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Include missing requirement (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/110"},"#110"),")",(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Typer is imported in this ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py"},"file")," but it is not included in ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/settings.ini"},"settings.ini")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add aiohttp which is imported in this ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py"},"file")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbformat which is imported in _components/helpers.py"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbconvert which is imported in 
_components/helpers.py")))),(0,r.kt)("h2",{id:"011"},"0.1.1"),(0,r.kt)("h3",{id:"bugs-squashed-7"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"JDK install fails on Python 3.8 (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/106"},"#106"),")")),(0,r.kt)("h2",{id:"010"},"0.1.0"),(0,r.kt)("p",null,"Initial release"))}o.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/935f2afb.c2b93cd4.js b/assets/js/935f2afb.c2b93cd4.js new file mode 100644 index 0000000..10f1a72 --- /dev/null +++ b/assets/js/935f2afb.c2b93cd4.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[53],{1109:e=>{e.exports=JSON.parse('{"pluginId":"default","version":"current","label":"dev \ud83d\udea7","banner":"unreleased","badge":true,"noIndex":false,"className":"docs-version-current","isLast":false,"docsSidebars":{"tutorialSidebar":[{"type":"link","label":"FastKafka","href":"/docs/next/","docId":"index"},{"type":"category","label":"Guides","items":[{"type":"category","label":"Writing services","items":[{"type":"link","label":"@consumes basics","href":"/docs/next/guides/Guide_11_Consumes_Basics","docId":"guides/Guide_11_Consumes_Basics"},{"type":"link","label":"Batch consuming","href":"/docs/next/guides/Guide_12_Batch_Consuming","docId":"guides/Guide_12_Batch_Consuming"},{"type":"link","label":"@produces basics","href":"/docs/next/guides/Guide_21_Produces_Basics","docId":"guides/Guide_21_Produces_Basics"},{"type":"link","label":"Defining a partition key","href":"/docs/next/guides/Guide_22_Partition_Keys","docId":"guides/Guide_22_Partition_Keys"},{"type":"link","label":"Batch producing","href":"/docs/next/guides/Guide_23_Batch_Producing","docId":"guides/Guide_23_Batch_Producing"},{"type":"link","label":"Lifespan Events","href":"/docs/next/guides/Guide_05_Lifespan_Handler","docId":"guides/Guide_05_Lifespan_Handler"},{"type":"link","label":"Encoding and Decoding Kafka Messages 
with FastKafka","href":"/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","docId":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},{"type":"link","label":"Using multiple Kafka clusters","href":"/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters","docId":"guides/Guide_24_Using_Multiple_Kafka_Clusters"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Testing","items":[{"type":"link","label":"Using Tester to test FastKafka","href":"/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka","docId":"guides/Guide_33_Using_Tester_class_to_test_fastkafka"},{"type":"link","label":"Using Redpanda to test FastKafka","href":"/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka","docId":"guides/Guide_31_Using_redpanda_to_test_fastkafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Documentation generation","items":[{"type":"link","label":"Deploy FastKafka docs to GitHub Pages","href":"/docs/next/guides/Guide_04_Github_Actions_Workflow","docId":"guides/Guide_04_Github_Actions_Workflow"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Deployment","items":[{"type":"link","label":"Deploying FastKafka using Docker","href":"/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka","docId":"guides/Guide_30_Using_docker_to_deploy_fastkafka"},{"type":"link","label":"Using FastAPI to Run FastKafka Application","href":"/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application","docId":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Benchmarking","items":[{"type":"link","label":"Benchmarking FastKafka 
app","href":"/docs/next/guides/Guide_06_Benchmarking_FastKafka","docId":"guides/Guide_06_Benchmarking_FastKafka"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"API","items":[{"type":"link","label":"EventMetadata","href":"/docs/next/api/fastkafka/EventMetadata","docId":"api/fastkafka/EventMetadata"},{"type":"link","label":"FastKafka","href":"/docs/next/api/fastkafka/","docId":"api/fastkafka/FastKafka"},{"type":"link","label":"KafkaEvent","href":"/docs/next/api/fastkafka/KafkaEvent","docId":"api/fastkafka/KafkaEvent"},{"type":"category","label":"encoder","items":[{"type":"link","label":"AvroBase","href":"/docs/next/api/fastkafka/encoder/AvroBase","docId":"api/fastkafka/encoder/AvroBase"},{"type":"link","label":"avro_decoder","href":"/docs/next/api/fastkafka/encoder/avro_decoder","docId":"api/fastkafka/encoder/avro_decoder"},{"type":"link","label":"avro_encoder","href":"/docs/next/api/fastkafka/encoder/avro_encoder","docId":"api/fastkafka/encoder/avro_encoder"},{"type":"link","label":"avsc_to_pydantic","href":"/docs/next/api/fastkafka/encoder/avsc_to_pydantic","docId":"api/fastkafka/encoder/avsc_to_pydantic"},{"type":"link","label":"json_decoder","href":"/docs/next/api/fastkafka/encoder/json_decoder","docId":"api/fastkafka/encoder/json_decoder"},{"type":"link","label":"json_encoder","href":"/docs/next/api/fastkafka/encoder/json_encoder","docId":"api/fastkafka/encoder/json_encoder"}],"collapsed":true,"collapsible":true},{"type":"category","label":"executors","items":[{"type":"link","label":"DynamicTaskExecutor","href":"/docs/next/api/fastkafka/executors/DynamicTaskExecutor","docId":"api/fastkafka/executors/DynamicTaskExecutor"},{"type":"link","label":"SequentialExecutor","href":"/docs/next/api/fastkafka/executors/SequentialExecutor","docId":"api/fastkafka/executors/SequentialExecutor"}],"collapsed":true,"collapsible":true},{"type":"category","label":"testing","items":[{"type":"link","label":"ApacheKafkaBroker","h
ref":"/docs/next/api/fastkafka/testing/ApacheKafkaBroker","docId":"api/fastkafka/testing/ApacheKafkaBroker"},{"type":"link","label":"LocalRedpandaBroker","href":"/docs/next/api/fastkafka/testing/LocalRedpandaBroker","docId":"api/fastkafka/testing/LocalRedpandaBroker"},{"type":"link","label":"Tester","href":"/docs/next/api/fastkafka/testing/Tester","docId":"api/fastkafka/testing/Tester"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"CLI","items":[{"type":"link","label":"fastkafka","href":"/docs/next/cli/fastkafka","docId":"cli/fastkafka"},{"type":"link","label":"run_fastkafka_server_process","href":"/docs/next/cli/run_fastkafka_server_process","docId":"cli/run_fastkafka_server_process"}],"collapsed":true,"collapsible":true},{"type":"link","label":"LICENSE","href":"/docs/next/LICENSE","docId":"LICENSE"},{"type":"link","label":"Contributing to FastKafka","href":"/docs/next/CONTRIBUTING","docId":"CONTRIBUTING"},{"type":"link","label":"Release notes","href":"/docs/next/CHANGELOG","docId":"CHANGELOG"}]},"docs":{"api/fastkafka/encoder/avro_decoder":{"id":"api/fastkafka/encoder/avro_decoder","title":"avro_decoder","description":"avrodecoder {fastkafka.encoder.avrodecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avro_encoder":{"id":"api/fastkafka/encoder/avro_encoder","title":"avro_encoder","description":"avroencoder {fastkafka.encoder.avroencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/AvroBase":{"id":"api/fastkafka/encoder/AvroBase","title":"AvroBase","description":"fastkafka.encoder.AvroBase {fastkafka.encoder.AvroBase}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avsc_to_pydantic":{"id":"api/fastkafka/encoder/avsc_to_pydantic","title":"avsc_to_pydantic","description":"avsctopydantic {fastkafka.encoder.avsctopydantic}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_decoder":{"id":"api/fastkafka/encoder/json_decoder","title":"json_decoder","description":"jsondecoder 
{fastkafka.encoder.jsondecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_encoder":{"id":"api/fastkafka/encoder/json_encoder","title":"json_encoder","description":"jsonencoder {fastkafka.encoder.jsonencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/EventMetadata":{"id":"api/fastkafka/EventMetadata","title":"EventMetadata","description":"fastkafka.EventMetadata {fastkafka.EventMetadata}","sidebar":"tutorialSidebar"},"api/fastkafka/executors/DynamicTaskExecutor":{"id":"api/fastkafka/executors/DynamicTaskExecutor","title":"DynamicTaskExecutor","description":"fastkafka.executors.DynamicTaskExecutor {fastkafka.executors.DynamicTaskExecutor}","sidebar":"tutorialSidebar"},"api/fastkafka/executors/SequentialExecutor":{"id":"api/fastkafka/executors/SequentialExecutor","title":"SequentialExecutor","description":"fastkafka.executors.SequentialExecutor {fastkafka.executors.SequentialExecutor}","sidebar":"tutorialSidebar"},"api/fastkafka/FastKafka":{"id":"api/fastkafka/FastKafka","title":"FastKafka","description":"fastkafka.FastKafka {fastkafka.FastKafka}","sidebar":"tutorialSidebar"},"api/fastkafka/KafkaEvent":{"id":"api/fastkafka/KafkaEvent","title":"KafkaEvent","description":"fastkafka.KafkaEvent {fastkafka.KafkaEvent}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/ApacheKafkaBroker":{"id":"api/fastkafka/testing/ApacheKafkaBroker","title":"ApacheKafkaBroker","description":"fastkafka.testing.ApacheKafkaBroker {fastkafka.testing.ApacheKafkaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/LocalRedpandaBroker":{"id":"api/fastkafka/testing/LocalRedpandaBroker","title":"LocalRedpandaBroker","description":"fastkafka.testing.LocalRedpandaBroker {fastkafka.testing.LocalRedpandaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/Tester":{"id":"api/fastkafka/testing/Tester","title":"Tester","description":"fastkafka.testing.Tester {fastkafka.testing.Tester}","sidebar":"tutorialSidebar"},"CHANGELOG":{"id":"CHANGELOG","title":"Release 
notes","description":"0.8.0","sidebar":"tutorialSidebar"},"cli/fastkafka":{"id":"cli/fastkafka","title":"fastkafka","description":"Usage:","sidebar":"tutorialSidebar"},"cli/run_fastkafka_server_process":{"id":"cli/run_fastkafka_server_process","title":"run_fastkafka_server_process","description":"Usage:","sidebar":"tutorialSidebar"},"CONTRIBUTING":{"id":"CONTRIBUTING","title":"Contributing to FastKafka","description":"First off, thanks for taking the time to contribute! \u2764\ufe0f","sidebar":"tutorialSidebar"},"guides/Guide_00_FastKafka_Demo":{"id":"guides/Guide_00_FastKafka_Demo","title":"FastKafka tutorial","description":"FastKafka is a powerful and easy-to-use"},"guides/Guide_01_Intro":{"id":"guides/Guide_01_Intro","title":"Intro","description":"This tutorial will show you how to use FastKafkaAPI, step by"},"guides/Guide_02_First_Steps":{"id":"guides/Guide_02_First_Steps","title":"First Steps","description":"Creating a simple Kafka consumer app"},"guides/Guide_03_Authentication":{"id":"guides/Guide_03_Authentication","title":"Authentication","description":"TLS Authentication"},"guides/Guide_04_Github_Actions_Workflow":{"id":"guides/Guide_04_Github_Actions_Workflow","title":"Deploy FastKafka docs to GitHub Pages","description":"Getting started","sidebar":"tutorialSidebar"},"guides/Guide_05_Lifespan_Handler":{"id":"guides/Guide_05_Lifespan_Handler","title":"Lifespan Events","description":"Did you know that you can define some special code that runs before and","sidebar":"tutorialSidebar"},"guides/Guide_06_Benchmarking_FastKafka":{"id":"guides/Guide_06_Benchmarking_FastKafka","title":"Benchmarking FastKafka app","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka":{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","title":"Encoding and Decoding Kafka Messages with 
FastKafka","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_11_Consumes_Basics":{"id":"guides/Guide_11_Consumes_Basics","title":"@consumes basics","description":"You can use @consumes decorator to consume messages from Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_12_Batch_Consuming":{"id":"guides/Guide_12_Batch_Consuming","title":"Batch consuming","description":"If you want to consume data in batches @consumes decorator makes that","sidebar":"tutorialSidebar"},"guides/Guide_21_Produces_Basics":{"id":"guides/Guide_21_Produces_Basics","title":"@produces basics","description":"You can use @produces decorator to produce messages to Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_22_Partition_Keys":{"id":"guides/Guide_22_Partition_Keys","title":"Defining a partition key","description":"Partition keys are used in Apache Kafka to determine which partition a","sidebar":"tutorialSidebar"},"guides/Guide_23_Batch_Producing":{"id":"guides/Guide_23_Batch_Producing","title":"Batch producing","description":"If you want to send your data in batches @produces decorator makes","sidebar":"tutorialSidebar"},"guides/Guide_24_Using_Multiple_Kafka_Clusters":{"id":"guides/Guide_24_Using_Multiple_Kafka_Clusters","title":"Using multiple Kafka clusters","description":"Ready to take your FastKafka app to the next level? 
This guide shows you","sidebar":"tutorialSidebar"},"guides/Guide_30_Using_docker_to_deploy_fastkafka":{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","title":"Deploying FastKafka using Docker","description":"Building a Docker Image","sidebar":"tutorialSidebar"},"guides/Guide_31_Using_redpanda_to_test_fastkafka":{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","title":"Using Redpanda to test FastKafka","description":"What is FastKafka?","sidebar":"tutorialSidebar"},"guides/Guide_32_Using_fastapi_to_run_fastkafka_application":{"id":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application","title":"Using FastAPI to Run FastKafka Application","description":"When deploying a FastKafka application, the default approach is to","sidebar":"tutorialSidebar"},"guides/Guide_33_Using_Tester_class_to_test_fastkafka":{"id":"guides/Guide_33_Using_Tester_class_to_test_fastkafka","title":"Using Tester to test FastKafka","description":"In order to speed up development and make testing easier, we have","sidebar":"tutorialSidebar"},"index":{"id":"index","title":"FastKafka","description":"Effortless Kafka integration for your web services","sidebar":"tutorialSidebar"},"LICENSE":{"id":"LICENSE","title":"LICENSE","description":"Apache License","sidebar":"tutorialSidebar"}}}')}}]); \ No newline at end of file diff --git a/assets/js/9440fd12.93ff0ee3.js b/assets/js/9440fd12.93ff0ee3.js new file mode 100644 index 0000000..2462f08 --- /dev/null +++ b/assets/js/9440fd12.93ff0ee3.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1604],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>m});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return 
t}function r(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?s(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function i(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},s=Object.keys(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),l=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof e?e(a):r(r({},a),e)),t},c=function(e){var a=l(e.components);return n.createElement(p.Provider,{value:a},e.children)},k="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},f=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,s=e.originalType,p=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),k=l(t),f=o,m=k["".concat(p,".").concat(f)]||k[f]||d[f]||s;return t?n.createElement(m,r(r({ref:a},c),{},{components:t})):n.createElement(m,r({ref:a},c))}));function m(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=f;var i={};for(var p in a)hasOwnProperty.call(a,p)&&(i[p]=a[p]);i.originalType=e,i[k]="string"==typeof e?e:o,r[1]=i;for(var l=2;l<s;l++)r[l]=t[l];return n.createElement.apply(null,r)}return n.createElement.apply(null,t)}f.displayName="MDXCreateElement"},2218:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>d,frontMatter:()=>s,metadata:()=>i,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},r="FastKafka 
tutorial",i={unversionedId:"guides/Guide_00_FastKafka_Demo",id:"guides/Guide_00_FastKafka_Demo",title:"FastKafka tutorial",description:"FastKafka is a powerful and easy-to-use",source:"@site/docs/guides/Guide_00_FastKafka_Demo.md",sourceDirName:"guides",slug:"/guides/Guide_00_FastKafka_Demo",permalink:"/docs/next/guides/Guide_00_FastKafka_Demo",draft:!1,tags:[],version:"current",frontMatter:{}},p={},l=[{value:"Install",id:"install",level:2},{value:"Running in Colab",id:"running-in-colab",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2}],c={toc:l},k="wrapper";function d(e){let{components:a,...t}=e;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka-tutorial"},"FastKafka tutorial"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"try:\n import fastkafka\nexcept:\n ! 
pip install fastkafka\n")),(0,o.kt)("h2",{id:"running-in-colab"},"Running in Colab"),(0,o.kt)("p",null,"You can start this interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open In Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"Here is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic."),(0,o.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,o.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. The following call downloads\nthe dataset and trains the model."),(0,o.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/types/#constrained-types"},(0,o.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,o.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,o.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,o.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,o.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts Kafka broker and zookeeper."),(0,o.kt)("p",null,"Before running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka testing install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop 
shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a Iris classification model and encapulated it into our\nfastkafka application. 
The app will consume the IrisInputData from the\n",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,o.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purpuoses")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent IrisInputData message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to IrisInputData message"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin ",(0,o.kt)("inlineCode",{parentName:"p"},"faskafka run")," CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal 
length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("p",null,"To run the service, you will need a running Kafka broker on localhost as\nspecified in the ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")," parameter above. 
We can start the Kafka\nbroker locally using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/testing/ApacheKafkaBroker#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),".\nNotice that the same happens automatically in the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\nas shown above."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n'127.0.0.1:9092'\n")),(0,o.kt)("p",null,"Then, we start the FastKafka service by running the following command in\nthe folder where the ",(0,o.kt)("inlineCode",{parentName:"p"},"application.py")," file is located:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=2 --kafka-broker 
localhost application:kafka_app\n")),(0,o.kt)("p",null,"In the above command, we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--num-workers")," option to specify how many\nworkers to launch and we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--kafka-broker")," option to specify which\nkafka broker configuration to use from earlier specified ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n^C\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
stopped.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n")),(0,o.kt)("p",null,"You need to interupt running of the cell above by selecting\n",(0,o.kt)("inlineCode",{parentName:"p"},"Runtime->Interupt execution")," on the toolbar above."),(0,o.kt)("p",null,"Finally, we can stop the local Kafka Broker:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"When running in Colab, we need to update Node.js first:"),(0,o.kt)("p",null,"We need to install all dependancies for the generator using the\nfollowing command line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation 
programatically you just need to call the\nfolloving command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n")),(0,o.kt)("p",null,". This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\ndrwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n^C\nInterupting serving of documentation and cleaning up...\n")),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n 
bootstrap_servers="localhost:9092",\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/94d2eef0.b68cf51d.js b/assets/js/94d2eef0.b68cf51d.js new file mode 100644 index 0000000..be7643a --- /dev/null +++ b/assets/js/94d2eef0.b68cf51d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9723],{3905:(t,a,e)=>{e.d(a,{Zo:()=>u,kt:()=>c});var i=e(7294);function r(t,a,e){return a in t?Object.defineProperty(t,a,{value:e,enumerable:!0,configurable:!0,writable:!0}):t[a]=e,t}function n(t,a){var e=Object.keys(t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(t);a&&(i=i.filter((function(a){return Object.getOwnPropertyDescriptor(t,a).enumerable}))),e.push.apply(e,i)}return e}function s(t){for(var a=1;a<arguments.length;a++){var 
e=null!=arguments[a]?arguments[a]:{};a%2?n(Object(e),!0).forEach((function(a){r(t,a,e[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(e)):n(Object(e)).forEach((function(a){Object.defineProperty(t,a,Object.getOwnPropertyDescriptor(e,a))}))}return t}function p(t,a){if(null==t)return{};var e,i,r=function(t,a){if(null==t)return{};var e,i,r={},n=Object.keys(t);for(i=0;i<n.length;i++)e=n[i],a.indexOf(e)>=0||(r[e]=t[e]);return r}(t,a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);for(i=0;i<n.length;i++)e=n[i],a.indexOf(e)>=0||Object.prototype.propertyIsEnumerable.call(t,e)&&(r[e]=t[e])}return r}var l=i.createContext({}),k=function(t){var a=i.useContext(l),e=a;return t&&(e="function"==typeof t?t(a):s(s({},a),t)),e},u=function(t){var a=k(t.components);return i.createElement(l.Provider,{value:a},t.children)},m="mdxType",o={inlineCode:"code",wrapper:function(t){var a=t.children;return i.createElement(i.Fragment,{},a)}},h=i.forwardRef((function(t,a){var e=t.components,r=t.mdxType,n=t.originalType,l=t.parentName,u=p(t,["components","mdxType","originalType","parentName"]),m=k(e),h=r,c=m["".concat(l,".").concat(h)]||m[h]||o[h]||n;return e?i.createElement(c,s(s({ref:a},u),{},{components:e})):i.createElement(c,s({ref:a},u))}));function c(t,a){var e=arguments,r=a&&a.mdxType;if("string"==typeof t||r){var n=e.length,s=new Array(n);s[0]=h;var p={};for(var l in a)hasOwnProperty.call(a,l)&&(p[l]=a[l]);p.originalType=t,p[m]="string"==typeof t?t:r,s[1]=p;for(var k=2;k<n;k++)s[k]=e[k];return i.createElement.apply(null,s)}return i.createElement.apply(null,e)}h.displayName="MDXCreateElement"},4855:(t,a,e)=>{e.r(a),e.d(a,{assets:()=>l,contentTitle:()=>s,default:()=>o,frontMatter:()=>n,metadata:()=>p,toc:()=>k});var i=e(7462),r=(e(7294),e(3905));const n={},s="Release notes",p={unversionedId:"CHANGELOG",id:"version-0.7.1/CHANGELOG",title:"Release 
notes",description:"0.7.0",source:"@site/versioned_docs/version-0.7.1/CHANGELOG.md",sourceDirName:".",slug:"/CHANGELOG",permalink:"/docs/0.7.1/CHANGELOG",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Contributing to fastkafka",permalink:"/docs/0.7.1/CONTRIBUTING"}},l={},k=[{value:"0.7.0",id:"070",level:2},{value:"New Features",id:"new-features",level:3},{value:"Bugs Squashed",id:"bugs-squashed",level:3},{value:"0.6.0",id:"060",level:2},{value:"New Features",id:"new-features-1",level:3},{value:"Bugs Squashed",id:"bugs-squashed-1",level:3},{value:"0.5.0",id:"050",level:2},{value:"New Features",id:"new-features-2",level:3},{value:"Bugs Squashed",id:"bugs-squashed-2",level:3},{value:"0.4.0",id:"040",level:2},{value:"New Features",id:"new-features-3",level:3},{value:"0.3.1",id:"031",level:2},{value:"0.3.0",id:"030",level:2},{value:"New Features",id:"new-features-4",level:3},{value:"Bugs Squashed",id:"bugs-squashed-3",level:3},{value:"0.2.3",id:"023",level:2},{value:"0.2.2",id:"022",level:2},{value:"New Features",id:"new-features-5",level:3},{value:"Bugs Squashed",id:"bugs-squashed-4",level:3},{value:"0.2.0",id:"020",level:2},{value:"New Features",id:"new-features-6",level:3},{value:"Bugs Squashed",id:"bugs-squashed-5",level:3},{value:"0.1.3",id:"013",level:2},{value:"0.1.2",id:"012",level:2},{value:"New Features",id:"new-features-7",level:3},{value:"Bugs Squashed",id:"bugs-squashed-6",level:3},{value:"0.1.1",id:"011",level:2},{value:"Bugs Squashed",id:"bugs-squashed-7",level:3},{value:"0.1.0",id:"010",level:2}],u={toc:k},m="wrapper";function o(t){let{components:a,...e}=t;return(0,r.kt)(m,(0,i.Z)({},u,e,{components:a,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"release-notes"},"Release notes"),(0,r.kt)("h2",{id:"070"},"0.7.0"),(0,r.kt)("h3",{id:"new-features"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optional description argument to consumes and produces decorator 
implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/338"},"#338"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Consumes and produces decorators now have optional ",(0,r.kt)("inlineCode",{parentName:"li"},"description")," argument that is used instead of function docstring in async doc generation when specified"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"FastKafka Windows OS support enabled (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/326"},"#326"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"FastKafka can now run on Windows"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"FastKafka and FastAPI integration implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/304"},"#304"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"FastKafka can now be run alongside FastAPI"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Batch consuming option to consumers implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/298"},"#298"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Consumers can consume events in batches by specifying msg type of consuming function as ",(0,r.kt)("inlineCode",{parentName:"li"},"List[YourMsgType]")," "))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed support for synchronous produce functions 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/295"},"#295"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Added default broker values and update docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/292"},"#292"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("h3",{id:"bugs-squashed"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix index.ipynb to be runnable in colab (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/342"},"#342"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Use cli option root_path docs generate and serve CLI commands (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/341"},"#341"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix incorrect asyncapi docs path on fastkafka docs serve command (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/335"},"#335"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Serve docs now takes app ",(0,r.kt)("inlineCode",{parentName:"li"},"root_path")," argument into consideration when specified in app"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/315"},"#315"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix logs printing timestamps 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/308"},"#308"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix topics with dots causing failure of tester instantiation (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/306"},"#306"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},'Specified topics can now have "." in their names')))),(0,r.kt)("h2",{id:"060"},"0.6.0"),(0,r.kt)("h3",{id:"new-features-1"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Timestamps added to CLI commands (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/283"},"#283"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Added option to process messages concurrently (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/278"},"#278"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"A new ",(0,r.kt)("inlineCode",{parentName:"li"},"executor")," option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add consumes and produces functions to app (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/274"},"#274"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add batching for producers 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/273"},"#273"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(batch): batch support is a real need! and i see it on the issue list.... so hope we do not need to wait too long"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken links in guides (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/272"},"#272"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate the docusaurus sidebar dynamically by parsing summary.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/270"},"#270"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Metadata passed to consumer (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/269"},"#269"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(key): read the key value somehow..Maybe I missed something in the docs\nrequirement(header): read header values, Reason: I use CDC | Debezium and in the current system the header values are important to differentiate between the CRUD 
operations."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Contribution with instructions how to build and test added (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/255"},"#255"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Export encoders, decoders from fastkafka.encoder (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/246"},"#246"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/239"},"#239"),")")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"UI Improvement: Post screenshots with links to the actual messages in testimonials section (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/228"},"#228"),")")),(0,r.kt)("h3",{id:"bugs-squashed-1"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Batch testing fix (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/280"},"#280"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Tester breaks when using Batching or KafkaEvent producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/279"},"#279"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Consumer loop callbacks are not executing in parallel (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/276"},"#276"),")"))),(0,r.kt)("h2",{id:"050"},"0.5.0"),(0,r.kt)("h3",{id:"new-features-2"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Significant speedup of Kafka producer (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/236"},"#236"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Added support for AVRO encoding/decoding (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/231"},"#231"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("h3",{id:"bugs-squashed-2"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed sidebar to include guides in docusaurus documentation 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/238"},"#238"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed link to symbols in docusaurus docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/227"},"#227"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed bootstrap servers from constructor (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/220"},"#220"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")))),(0,r.kt)("h2",{id:"040"},"0.4.0"),(0,r.kt)("h3",{id:"new-features-3"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Integrate fastkafka chat (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/208"},"#208"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add benchmarking (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/206"},"#206"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Enable fast testing without running kafka locally (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/198"},"#198"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate docs using Docusaurus (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/194"},"#194"),"), thanks to 
",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add test cases for LocalRedpandaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/189"},"#189"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Reimplement patch and delegates from fastcore (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/188"},"#188"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Rename existing functions into start and stop and add lifespan handler (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/117"},"#117"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"},"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"))))),(0,r.kt)("h2",{id:"031"},"0.3.1"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"README.md file updated")),(0,r.kt)("h2",{id:"030"},"0.3.0"),(0,r.kt)("h3",{id:"new-features-4"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Guide for fastkafka produces using partition key (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/172"},"#172"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #161"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add support for Redpanda for testing and deployment 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/181"},"#181"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Remove bootstrap_servers from ",(0,r.kt)("strong",{parentName:"p"},"init")," and use the name of broker as an option when running/testing (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/134"},"#134"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add a GH action file to check for broken links in the docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/163"},"#163"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optimize requirements for testing and docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/151"},"#151"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Break requirements into base and optional for testing and dev (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/124"},"#124"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Minimize base requirements needed just for running the service."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add link to example git repo into guide for building docs using actions (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/81"},"#81"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add logging for run_in_background (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/46"},"#46"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement partition Key mechanism for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/16"},"#16"),")"))),(0,r.kt)("h3",{id:"bugs-squashed-3"},"Bugs 
Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement checks for npm installation and version (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/176"},"#176"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #158 by checking if the npx is installed and more verbose error handling"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix the helper.py link in CHANGELOG.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/165"},"#165"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka docs install_deps fails (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/157"},"#157"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Unexpected internal error: ","[Errno 2]"," No such file or directory: 'npx'"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Broken links in docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/141"},"#141"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka run is not showing up in CLI docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/132"},"#132"),")"))),(0,r.kt)("h2",{id:"023"},"0.2.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fixed broken links on PyPi index page")),(0,r.kt)("h2",{id:"022"},"0.2.2"),(0,r.kt)("h3",{id:"new-features-5"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Extract JDK and Kafka installation out of LocalKafkaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/131"},"#131"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"PyYAML version relaxed 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/119"},"#119"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Replace docker based kafka with local (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/68"},"#68"),")"),(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace docker compose with a simple docker run (standard run_jupyter.sh should do)"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace all tests to use LocalKafkaBroker"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","update documentation")))),(0,r.kt)("h3",{id:"bugs-squashed-4"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken link for FastKafka docs in index notebook (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/145"},"#145"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix encoding issues when loading setup.py on windows OS (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/135"},"#135"),")"))),(0,r.kt)("h2",{id:"020"},"0.2.0"),(0,r.kt)("h3",{id:"new-features-6"},"New Features"),(0,r.kt)("ul",{className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul"},"Replace kafka container with LocalKafkaBroker 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/112"},"#112"),")",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Replace kafka container with LocalKafkaBroker in tests"))))),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Remove kafka container from tests environment"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Fix failing tests")),(0,r.kt)("h3",{id:"bugs-squashed-5"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fix random failing in CI (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/109"},"#109"),")")),(0,r.kt)("h2",{id:"013"},"0.1.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"version update in ",(0,r.kt)("strong",{parentName:"li"},"init"),".py")),(0,r.kt)("h2",{id:"012"},"0.1.2"),(0,r.kt)("h3",{id:"new-features-7"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Git workflow action for publishing Kafka docs (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/78"},"#78"),")")),(0,r.kt)("h3",{id:"bugs-squashed-6"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Include missing requirement (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/110"},"#110"),")",(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Typer is imported in this 
",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py"},"file")," but it is not included in ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/settings.ini"},"settings.ini")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add aiohttp which is imported in this ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py"},"file")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbformat which is imported in _components/helpers.py"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbconvert which is imported in _components/helpers.py")))),(0,r.kt)("h2",{id:"011"},"0.1.1"),(0,r.kt)("h3",{id:"bugs-squashed-7"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"JDK install fails on Python 3.8 (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/106"},"#106"),")")),(0,r.kt)("h2",{id:"010"},"0.1.0"),(0,r.kt)("p",null,"Initial release"))}o.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/97a352ae.33fe6fad.js b/assets/js/97a352ae.33fe6fad.js new file mode 100644 index 0000000..487e98f --- /dev/null +++ b/assets/js/97a352ae.33fe6fad.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5439],{3905:(e,t,a)=>{a.d(t,{Zo:()=>p,kt:()=>k});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),u=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},h="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),h=u(a),c=o,k=h["".concat(s,".").concat(c)]||h[c]||d[c]||i;return a?n.createElement(k,r(r({ref:t},p),{},{components:a})):n.createElement(k,r({ref:t},p))}));function k(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=a.length,r=new Array(i);r[0]=c;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[h]="string"==typeof e?e:o,r[1]=l;for(var u=2;u<i;u++)r[u]=a[u];return n.createElement.apply(null,r)}return n.createElement.apply(null,a)}c.displayName="MDXCreateElement"},2522:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var n=a(7462),o=(a(7294),a(3905));const i={},r="Contributing to 
fastkafka",l={unversionedId:"CONTRIBUTING",id:"version-0.7.1/CONTRIBUTING",title:"Contributing to fastkafka",description:"First off, thanks for taking the time to contribute! \u2764\ufe0f",source:"@site/versioned_docs/version-0.7.1/CONTRIBUTING.md",sourceDirName:".",slug:"/CONTRIBUTING",permalink:"/docs/0.7.1/CONTRIBUTING",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LICENSE",permalink:"/docs/0.7.1/LICENSE"},next:{title:"Release notes",permalink:"/docs/0.7.1/CHANGELOG"}},s={},u=[{value:"Table of Contents",id:"table-of-contents",level:2},{value:"I Have a Question",id:"i-have-a-question",level:2},{value:"I Want To Contribute",id:"i-want-to-contribute",level:2},{value:"Reporting Bugs",id:"reporting-bugs",level:3},{value:"Before Submitting a Bug Report",id:"before-submitting-a-bug-report",level:4},{value:"How Do I Submit a Good Bug Report?",id:"how-do-i-submit-a-good-bug-report",level:4},{value:"Suggesting Enhancements",id:"suggesting-enhancements",level:3},{value:"Before Submitting an Enhancement",id:"before-submitting-an-enhancement",level:4},{value:"How Do I Submit a Good Enhancement Suggestion?",id:"how-do-i-submit-a-good-enhancement-suggestion",level:4},{value:"Your First Code Contribution",id:"your-first-code-contribution",level:3},{value:"Development",id:"development",level:2},{value:"Prepare the dev environment",id:"prepare-the-dev-environment",level:3},{value:"Clone the fastkafka repository",id:"clone-the-fastkafka-repository",level:4},{value:"Optional: create a virtual python environment",id:"optional-create-a-virtual-python-environment",level:4},{value:"Install fastkafka",id:"install-fastkafka",level:4},{value:"Install JRE and Kafka toolkit",id:"install-jre-and-kafka-toolkit",level:4},{value:"Install npm",id:"install-npm",level:4},{value:"Install docusaurus",id:"install-docusaurus",level:4},{value:"Check if everything works",id:"check-if-everything-works",level:4},{value:"Way of 
working",id:"way-of-working",level:3},{value:"Before a PR",id:"before-a-pr",level:3},{value:"Attribution",id:"attribution",level:2}],p={toc:u},h="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(h,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"contributing-to-fastkafka"},"Contributing to fastkafka"),(0,o.kt)("p",null,"First off, thanks for taking the time to contribute! \u2764\ufe0f"),(0,o.kt)("p",null,"All types of contributions are encouraged and valued. See the ",(0,o.kt)("a",{parentName:"p",href:"#table-of-contents"},"Table of Contents")," for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. \ud83c\udf89"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"And if you like the project, but just don't have time to contribute, that's fine. 
There are other easy ways to support the project and show your appreciation, which we would also be very happy about:"),(0,o.kt)("ul",{parentName:"blockquote"},(0,o.kt)("li",{parentName:"ul"},"Star the project"),(0,o.kt)("li",{parentName:"ul"},"Tweet about it"),(0,o.kt)("li",{parentName:"ul"},"Refer this project in your project's readme"),(0,o.kt)("li",{parentName:"ul"},"Mention the project at local meetups and tell your friends/colleagues"))),(0,o.kt)("h2",{id:"table-of-contents"},"Table of Contents"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"I Have a Question")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#reporting-bugs"},"Reporting Bugs")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#suggesting-enhancements"},"Suggesting Enhancements")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#your-first-code-contribution"},"Your First Code Contribution")))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#development"},"Development"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#prepare-the-dev-environment"},"Prepare the dev environment")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#way-of-working"},"Way of working")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#before-a-pr"},"Before a PR")))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#join-the-project-team"},"Join The Project Team"))),(0,o.kt)("h2",{id:"i-have-a-question"},"I Have a Question"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"If you want to ask a question, we assume that you have read the available 
",(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/docs"},"Documentation"),".")),(0,o.kt)("p",null,"Before you ask a question, it is best to search for existing ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"Issues")," that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue."),(0,o.kt)("p",null,"If you then still feel the need to ask a question and need clarification, we recommend the following:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord")),(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Provide as much context as you can about what you're running into")))),(0,o.kt)("p",null,"We will then take care of the issue as soon as possible."),(0,o.kt)("h2",{id:"i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("blockquote",null,(0,o.kt)("h3",{parentName:"blockquote",id:"legal-notice"},"Legal Notice"),(0,o.kt)("p",{parentName:"blockquote"},"When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.")),(0,o.kt)("h3",{id:"reporting-bugs"},"Reporting Bugs"),(0,o.kt)("h4",{id:"before-submitting-a-bug-report"},"Before Submitting a Bug Report"),(0,o.kt)("p",null,"A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. 
Please complete the following steps in advance to help us fix any potential bug as fast as possible."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation"),". If you are looking for support, you might want to check ",(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"this section"),")."),(0,o.kt)("li",{parentName:"ul"},"To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafkaissues?q=label%3Abug"},"bug tracker"),"."),(0,o.kt)("li",{parentName:"ul"},"Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue."),(0,o.kt)("li",{parentName:"ul"},"Collect information about the bug:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Stack trace (Traceback)"),(0,o.kt)("li",{parentName:"ul"},"OS, Platform and Version (Windows, Linux, macOS, x86, ARM)"),(0,o.kt)("li",{parentName:"ul"},"Python version"),(0,o.kt)("li",{parentName:"ul"},"Possibly your input and the output"),(0,o.kt)("li",{parentName:"ul"},"Can you reliably reproduce the issue? And can you also reproduce it with older versions?")))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-bug-report"},"How Do I Submit a Good Bug Report?"),(0,o.kt)("p",null,"We use GitHub issues to track bugs and errors. If you run into an issue with the project:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),". 
(Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)"),(0,o.kt)("li",{parentName:"ul"},"Explain the behavior you would expect and the actual behavior."),(0,o.kt)("li",{parentName:"ul"},"Please provide as much context as possible and describe the ",(0,o.kt)("em",{parentName:"li"},"reproduction steps")," that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case."),(0,o.kt)("li",{parentName:"ul"},"Provide the information you collected in the previous section.")),(0,o.kt)("p",null,"Once it's filed:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"The project team will label the issue accordingly."),(0,o.kt)("li",{parentName:"ul"},"A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro"),". Bugs with the ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro")," tag will not be addressed until they are reproduced."),(0,o.kt)("li",{parentName:"ul"},"If the team is able to reproduce the issue, it will be marked ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-fix"),", as well as possibly other tags (such as ",(0,o.kt)("inlineCode",{parentName:"li"},"critical"),"), and the issue will be left to be implemented.")),(0,o.kt)("h3",{id:"suggesting-enhancements"},"Suggesting Enhancements"),(0,o.kt)("p",null,"This section guides you through submitting an enhancement suggestion for fastkafka, ",(0,o.kt)("strong",{parentName:"p"},"including completely new features and minor improvements to existing functionality"),". 
Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions."),(0,o.kt)("h4",{id:"before-submitting-an-enhancement"},"Before Submitting an Enhancement"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation")," carefully and find out if the functionality is already covered, maybe by an individual configuration."),(0,o.kt)("li",{parentName:"ul"},"Perform a ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues"},"search")," to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one."),(0,o.kt)("li",{parentName:"ul"},"Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library."),(0,o.kt)("li",{parentName:"ul"},"If you are not sure or would like to discuiss the enhancement with us directly, you can always contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord"))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-enhancement-suggestion"},"How Do I Submit a Good Enhancement Suggestion?"),(0,o.kt)("p",null,"Enhancement suggestions are tracked as ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"GitHub issues"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Use a ",(0,o.kt)("strong",{parentName:"li"},"clear and descriptive title")," for the issue to identify the suggestion."),(0,o.kt)("li",{parentName:"ul"},"Provide a ",(0,o.kt)("strong",{parentName:"li"},"step-by-step description of the suggested enhancement")," in as many details as possible."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Describe the current behavior")," and ",(0,o.kt)("strong",{parentName:"li"},"explain which behavior you expected to see instead")," and why. At this point you can also tell which alternatives do not work for you."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Explain why this enhancement would be useful")," to most fastkafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.")),(0,o.kt)("h3",{id:"your-first-code-contribution"},"Your First Code Contribution"),(0,o.kt)("p",null,'A great way to start contributing to FastKafka would be by solving an issue tagged with "good first issue". 
To find a list of issues that are tagged as "good first issue" and are suitable for newcomers, please visit the following link: ',(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/labels/good%20first%20issue"},"Good first issues")),(0,o.kt)("p",null,"These issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in ",(0,o.kt)("a",{parentName:"p",href:"#way-of-working"},"Way of working")," and ",(0,o.kt)("a",{parentName:"p",href:"#before-a-pr"},"Before a PR"),", and help us make FastKafka even better!"),(0,o.kt)("p",null,"If you have any questions or need further assistance, feel free to reach out to us. Happy coding!"),(0,o.kt)("h2",{id:"development"},"Development"),(0,o.kt)("h3",{id:"prepare-the-dev-environment"},"Prepare the dev environment"),(0,o.kt)("p",null,"To start contributing to fastkafka, you first have to prepare the development environment."),(0,o.kt)("h4",{id:"clone-the-fastkafka-repository"},"Clone the fastkafka repository"),(0,o.kt)("p",null,"To clone the repository, run the following command in the CLI:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"git clone https://github.com/airtai/fastkafka.git\n")),(0,o.kt)("h4",{id:"optional-create-a-virtual-python-environment"},"Optional: create a virtual python environment"),(0,o.kt)("p",null,"To prevent library version clashes with you other projects, it is reccomended that you create a virtual python environment for your fastkafka project by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"python3 -m venv fastkafka-env\n")),(0,o.kt)("p",null,"And to activate your virtual environment run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"source fastkafka-env/bin/activate\n")),(0,o.kt)("p",null,"To learn more about virtual environments, please 
have a look at ",(0,o.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available."},"official python documentation")),(0,o.kt)("h4",{id:"install-fastkafka"},"Install fastkafka"),(0,o.kt)("p",null,"To install fastkafka, navigate to the root directory of the cloned fastkafka project and run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'pip install fastkafka -e [."dev"]\n')),(0,o.kt)("h4",{id:"install-jre-and-kafka-toolkit"},"Install JRE and Kafka toolkit"),(0,o.kt)("p",null,"To be able to run tests and use all the functionalities of fastkafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka testing install-deps")," CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install JRE and Kafka manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.oracle.com/javase/9/install/toc.htm"},"JDK and JRE installation guide")," and ",(0,o.kt)("a",{parentName:"li",href:"https://kafka.apache.org/quickstart"},"Apache Kafka quickstart"))),(0,o.kt)("h4",{id:"install-npm"},"Install npm"),(0,o.kt)("p",null,"To be able to run tests you must have npm installed, because of documentation generation. 
To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka docs install_deps")," CLI command which will install npm for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install npm manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.npmjs.com/downloading-and-installing-node-js-and-npm"},"NPM installation guide"))),(0,o.kt)("h4",{id:"install-docusaurus"},"Install docusaurus"),(0,o.kt)("p",null,"To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of fastkafka project."),(0,o.kt)("h4",{id:"check-if-everything-works"},"Check if everything works"),(0,o.kt)("p",null,"After installing fastkafka and all the necessary dependencies, run ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev_test"),' in the root of fastkafka project. This will take a couple of minutes as it will run all the tests on fastkafka project. If everythng is setup correctly, you will get a "Success." message in your terminal, otherwise please refer to previous steps.'),(0,o.kt)("h3",{id:"way-of-working"},"Way of working"),(0,o.kt)("p",null,"The development of fastkafka is done in Jupyter notebooks. 
Inside the ",(0,o.kt)("inlineCode",{parentName:"p"},"nbs")," directory you will find all the source code of fastkafka, this is where you will implement your changes."),(0,o.kt)("p",null,"The testing, cleanup and exporting of the code is being handled by ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev"),", please, before starting the work on fastkafka, get familiar with it by reading ",(0,o.kt)("a",{parentName:"p",href:"https://nbdev.fast.ai/getting_started.html"},"nbdev documentation"),"."),(0,o.kt)("p",null,"The general philosopy you should follow when writing code for fastkafka is:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Function should be an atomic functionality, short and concise",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Good rule of thumb: your function should be 5-10 lines long usually"))),(0,o.kt)("li",{parentName:"ul"},"If there are more than 2 params, enforce keywording using *",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"E.g.: ",(0,o.kt)("inlineCode",{parentName:"li"},"def function(param1, *, param2, param3): ...")))),(0,o.kt)("li",{parentName:"ul"},"Define typing of arguments and return value",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected"))),(0,o.kt)("li",{parentName:"ul"},"After the function cell, write test cells using the assert keyword",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Whenever you implement something you should test that functionality immediately in the cells below "))),(0,o.kt)("li",{parentName:"ul"},"Add Google style python docstrings when function is implemented and tested")),(0,o.kt)("h3",{id:"before-a-pr"},"Before a PR"),(0,o.kt)("p",null,"After you have implemented your changes you will want to open a pull request to merge those changes into our main branch. 
To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of fastkafka project):"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Format your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbqa black nbs")),(0,o.kt)("li",{parentName:"ol"},"Close, shutdown, and clean the metadata from your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_clean")),(0,o.kt)("li",{parentName:"ol"},"Export your code: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_export")),(0,o.kt)("li",{parentName:"ol"},"Run the tests: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_test")),(0,o.kt)("li",{parentName:"ol"},"Test code typing: ",(0,o.kt)("inlineCode",{parentName:"li"},"mypy fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with bandit: ",(0,o.kt)("inlineCode",{parentName:"li"},"bandit -r fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with semgrep: ",(0,o.kt)("inlineCode",{parentName:"li"},"semgrep --config auto -r fastkafka"))),(0,o.kt)("p",null,"When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everythng is in order, we will approve your merge."),(0,o.kt)("h2",{id:"attribution"},"Attribution"),(0,o.kt)("p",null,"This guide is based on the ",(0,o.kt)("strong",{parentName:"p"},"contributing-gen"),". ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/bttger/contributing-gen"},"Make your own"),"!"))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/980c25d7.6573b859.js b/assets/js/980c25d7.6573b859.js new file mode 100644 index 0000000..04cb4ab --- /dev/null +++ b/assets/js/980c25d7.6573b859.js @@ -0,0 +1,2 @@ +/*! 
For license information please see 980c25d7.6573b859.js.LICENSE.txt */ +(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2473],{8685:(e,t,r)=>{"use strict";r.r(t),r.d(t,{default:()=>N});var n=r(7294),a=r(7452),o=r(5697),s=r.n(o),i=r(4063),l=r.n(i),u=r(1062),c=r.n(u),d=Object.defineProperty,p=Object.defineProperties,f=Object.getOwnPropertyDescriptors,h=Object.getOwnPropertySymbols,y=Object.prototype.hasOwnProperty,g=Object.prototype.propertyIsEnumerable,v=(e,t,r)=>t in e?d(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,m=(e,t)=>{for(var r in t||(t={}))y.call(t,r)&&v(e,r,t[r]);if(h)for(var r of h(t))g.call(t,r)&&v(e,r,t[r]);return e},P=(e,t)=>p(e,f(t));function b(e={}){return P(m({},e),{height:0,width:0,playerVars:P(m({},e.playerVars),{autoplay:0,start:0,end:0})})}var w={videoId:s().string,id:s().string,className:s().string,iframeClassName:s().string,style:s().object,title:s().string,loading:s().oneOf(["lazy","eager"]),opts:s().objectOf(s().any),onReady:s().func,onError:s().func,onPlay:s().func,onPause:s().func,onEnd:s().func,onStateChange:s().func,onPlaybackRateChange:s().func,onPlaybackQualityChange:s().func},E=class extends n.Component{constructor(e){super(e),this.destroyPlayerPromise=void 0,this.onPlayerReady=e=>{var t,r;return null==(r=(t=this.props).onReady)?void 0:r.call(t,e)},this.onPlayerError=e=>{var t,r;return null==(r=(t=this.props).onError)?void 0:r.call(t,e)},this.onPlayerStateChange=e=>{var t,r,n,a,o,s,i,l;switch(null==(r=(t=this.props).onStateChange)||r.call(t,e),e.data){case E.PlayerState.ENDED:null==(a=(n=this.props).onEnd)||a.call(n,e);break;case E.PlayerState.PLAYING:null==(s=(o=this.props).onPlay)||s.call(o,e);break;case E.PlayerState.PAUSED:null==(l=(i=this.props).onPause)||l.call(i,e)}},this.onPlayerPlaybackRateChange=e=>{var t,r;return null==(r=(t=this.props).onPlaybackRateChange)?void 0:r.call(t,e)},this.onPlayerPlaybackQualityChange=e=>{var t,r;return null==(r=(t=this.props).onPlaybackQualityChange)?void 
0:r.call(t,e)},this.destroyPlayer=()=>this.internalPlayer?(this.destroyPlayerPromise=this.internalPlayer.destroy().then((()=>this.destroyPlayerPromise=void 0)),this.destroyPlayerPromise):Promise.resolve(),this.createPlayer=()=>{if("undefined"==typeof document)return;if(this.destroyPlayerPromise)return void this.destroyPlayerPromise.then(this.createPlayer);const e=P(m({},this.props.opts),{videoId:this.props.videoId});this.internalPlayer=c()(this.container,e),this.internalPlayer.on("ready",this.onPlayerReady),this.internalPlayer.on("error",this.onPlayerError),this.internalPlayer.on("stateChange",this.onPlayerStateChange),this.internalPlayer.on("playbackRateChange",this.onPlayerPlaybackRateChange),this.internalPlayer.on("playbackQualityChange",this.onPlayerPlaybackQualityChange),(this.props.title||this.props.loading)&&this.internalPlayer.getIframe().then((e=>{this.props.title&&e.setAttribute("title",this.props.title),this.props.loading&&e.setAttribute("loading",this.props.loading)}))},this.resetPlayer=()=>this.destroyPlayer().then(this.createPlayer),this.updatePlayer=()=>{var e;null==(e=this.internalPlayer)||e.getIframe().then((e=>{this.props.id?e.setAttribute("id",this.props.id):e.removeAttribute("id"),this.props.iframeClassName?e.setAttribute("class",this.props.iframeClassName):e.removeAttribute("class"),this.props.opts&&this.props.opts.width?e.setAttribute("width",this.props.opts.width.toString()):e.removeAttribute("width"),this.props.opts&&this.props.opts.height?e.setAttribute("height",this.props.opts.height.toString()):e.removeAttribute("height"),this.props.title?e.setAttribute("title",this.props.title):e.setAttribute("title","YouTube video player"),this.props.loading?e.setAttribute("loading",this.props.loading):e.removeAttribute("loading")}))},this.getInternalPlayer=()=>this.internalPlayer,this.updateVideo=()=>{var e,t,r,n;if(void 0===this.props.videoId||null===this.props.videoId)return void(null==(e=this.internalPlayer)||e.stopVideo());let a=!1;const 
o={videoId:this.props.videoId};(null==(t=this.props.opts)?void 0:t.playerVars)&&(a=1===this.props.opts.playerVars.autoplay,"start"in this.props.opts.playerVars&&(o.startSeconds=this.props.opts.playerVars.start),"end"in this.props.opts.playerVars&&(o.endSeconds=this.props.opts.playerVars.end)),a?null==(r=this.internalPlayer)||r.loadVideoById(o):null==(n=this.internalPlayer)||n.cueVideoById(o)},this.refContainer=e=>{this.container=e},this.container=null,this.internalPlayer=null}componentDidMount(){this.createPlayer()}componentDidUpdate(e){return t=this,r=null,n=function*(){(function(e,t){var r,n,a,o;return e.id!==t.id||e.className!==t.className||(null==(r=e.opts)?void 0:r.width)!==(null==(n=t.opts)?void 0:n.width)||(null==(a=e.opts)?void 0:a.height)!==(null==(o=t.opts)?void 0:o.height)||e.iframeClassName!==t.iframeClassName||e.title!==t.title})(e,this.props)&&this.updatePlayer(),function(e,t){return e.videoId!==t.videoId||!l()(b(e.opts),b(t.opts))}(e,this.props)&&(yield this.resetPlayer()),function(e,t){var r,n;if(e.videoId!==t.videoId)return!0;const a=(null==(r=e.opts)?void 0:r.playerVars)||{},o=(null==(n=t.opts)?void 0:n.playerVars)||{};return a.start!==o.start||a.end!==o.end}(e,this.props)&&this.updateVideo()},new Promise(((e,a)=>{var o=e=>{try{i(n.next(e))}catch(t){a(t)}},s=e=>{try{i(n.throw(e))}catch(t){a(t)}},i=t=>t.done?e(t.value):Promise.resolve(t.value).then(o,s);i((n=n.apply(t,r)).next())}));var t,r,n}componentWillUnmount(){this.destroyPlayer()}render(){return n.createElement("div",{className:this.props.className,style:this.props.style},n.createElement("div",{id:this.props.id,className:this.props.iframeClassName,ref:this.refContainer}))}},C=E;C.propTypes=w,C.defaultProps={videoId:"",id:"",className:"",iframeClassName:"",style:{},title:"",loading:void 
0,opts:{},onReady:()=>{},onError:()=>{},onPlay:()=>{},onPause:()=>{},onEnd:()=>{},onStateChange:()=>{},onPlaybackRateChange:()=>{},onPlaybackQualityChange:()=>{}},C.PlayerState={UNSTARTED:-1,ENDED:0,PLAYING:1,PAUSED:2,BUFFERING:3,CUED:5};var S=C;const A={features:"features_s1P3",header:"header_vZXi",description:"description_vol2",containerWithMinHeight:"containerWithMinHeight_zTH6"},k={height:"720",width:"1280"};function N(){return n.createElement(a.Z,{title:"Demo",description:"Demo"},n.createElement("section",{className:`hero hero--primary ${A.containerWithMinHeight}`},n.createElement("div",{className:"container"},n.createElement("div",{className:"row"},n.createElement("div",{className:"col col--12"},n.createElement(S,{videoId:"dQw4w9WgXcQ",opts:k}))))))}},4063:e=>{"use strict";e.exports=function e(t,r){if(t===r)return!0;if(t&&r&&"object"==typeof t&&"object"==typeof r){if(t.constructor!==r.constructor)return!1;var n,a,o;if(Array.isArray(t)){if((n=t.length)!=r.length)return!1;for(a=n;0!=a--;)if(!e(t[a],r[a]))return!1;return!0}if(t.constructor===RegExp)return t.source===r.source&&t.flags===r.flags;if(t.valueOf!==Object.prototype.valueOf)return t.valueOf()===r.valueOf();if(t.toString!==Object.prototype.toString)return t.toString()===r.toString();if((n=(o=Object.keys(t)).length)!==Object.keys(r).length)return!1;for(a=n;0!=a--;)if(!Object.prototype.hasOwnProperty.call(r,o[a]))return!1;for(a=n;0!=a--;){var s=o[a];if(!e(t[s],r[s]))return!1}return!0}return t!=t&&r!=r}},9090:e=>{function t(e,t){e.onload=function(){this.onerror=this.onload=null,t(null,e)},e.onerror=function(){this.onerror=this.onload=null,t(new Error("Failed to load "+this.src),e)}}function r(e,t){e.onreadystatechange=function(){"complete"!=this.readyState&&"loaded"!=this.readyState||(this.onreadystatechange=null,t(null,e))}}e.exports=function(e,n,a){var o=document.head||document.getElementsByTagName("head")[0],s=document.createElement("script");"function"==typeof 
n&&(a=n,n={}),n=n||{},a=a||function(){},s.type=n.type||"text/javascript",s.charset=n.charset||"utf8",s.async=!("async"in n)||!!n.async,s.src=e,n.attrs&&function(e,t){for(var r in t)e.setAttribute(r,t[r])}(s,n.attrs),n.text&&(s.text=""+n.text),("onload"in s?t:r)(s,a),s.onload||t(s,a),o.appendChild(s)}},3988:e=>{"use strict";var t;t=function(){var e={},t={};return e.on=function(e,r){var n={name:e,handler:r};return t[e]=t[e]||[],t[e].unshift(n),n},e.off=function(e){var r=t[e.name].indexOf(e);-1!==r&&t[e.name].splice(r,1)},e.trigger=function(e,r){var n,a=t[e];if(a)for(n=a.length;n--;)a[n].handler(r)},e},e.exports=t},6006:(e,t,r)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0});var n,a=r(2275),o=(n=a)&&n.__esModule?n:{default:n};t.default={pauseVideo:{acceptableStates:[o.default.ENDED,o.default.PAUSED],stateChangeRequired:!1},playVideo:{acceptableStates:[o.default.ENDED,o.default.PLAYING],stateChangeRequired:!1},seekTo:{acceptableStates:[o.default.ENDED,o.default.PLAYING,o.default.PAUSED],stateChangeRequired:!0,timeout:3e3}},e.exports=t.default},9125:(e,t,r)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0});var n=i(r(9215)),a=i(r(8255)),o=i(r(5279)),s=i(r(6006));function i(e){return e&&e.__esModule?e:{default:e}}var l=(0,n.default)("youtube-player"),u={proxyEvents:function(e){var t={},r=function(r){var n="on"+r.slice(0,1).toUpperCase()+r.slice(1);t[n]=function(t){l('event "%s"',n,t),e.trigger(r,t)}},n=!0,a=!1,s=void 0;try{for(var i,u=o.default[Symbol.iterator]();!(n=(i=u.next()).done);n=!0){r(i.value)}}catch(c){a=!0,s=c}finally{try{!n&&u.return&&u.return()}finally{if(a)throw s}}return t},promisifyPlayer:function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],r={},n=function(n){t&&s.default[n]?r[n]=function(){for(var t=arguments.length,r=Array(t),a=0;a<t;a++)r[a]=arguments[a];return e.then((function(e){var t=s.default[n],a=e.getPlayerState(),o=e[n].apply(e,r);return 
t.stateChangeRequired||Array.isArray(t.acceptableStates)&&-1===t.acceptableStates.indexOf(a)?new Promise((function(r){e.addEventListener("onStateChange",(function n(){var a=e.getPlayerState(),o=void 0;"number"==typeof t.timeout&&(o=setTimeout((function(){e.removeEventListener("onStateChange",n),r()}),t.timeout)),Array.isArray(t.acceptableStates)&&-1!==t.acceptableStates.indexOf(a)&&(e.removeEventListener("onStateChange",n),clearTimeout(o),r())}))})).then((function(){return o})):o}))}:r[n]=function(){for(var t=arguments.length,r=Array(t),a=0;a<t;a++)r[a]=arguments[a];return e.then((function(e){return e[n].apply(e,r)}))}},o=!0,i=!1,l=void 0;try{for(var u,c=a.default[Symbol.iterator]();!(o=(u=c.next()).done);o=!0){n(u.value)}}catch(d){i=!0,l=d}finally{try{!o&&c.return&&c.return()}finally{if(i)throw l}}return r}};t.default=u,e.exports=t.default},2275:(e,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default={BUFFERING:3,ENDED:0,PAUSED:2,PLAYING:1,UNSTARTED:-1,VIDEO_CUED:5},e.exports=t.default},5279:(e,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=["ready","stateChange","playbackQualityChange","playbackRateChange","error","apiChange","volumeChange"],e.exports=t.default},8255:(e,t)=>{"use 
strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=["cueVideoById","loadVideoById","cueVideoByUrl","loadVideoByUrl","playVideo","pauseVideo","stopVideo","getVideoLoadedFraction","cuePlaylist","loadPlaylist","nextVideo","previousVideo","playVideoAt","setShuffle","setLoop","getPlaylist","getPlaylistIndex","setOption","mute","unMute","isMuted","setVolume","getVolume","seekTo","getPlayerState","getPlaybackRate","setPlaybackRate","getAvailablePlaybackRates","getPlaybackQuality","setPlaybackQuality","getAvailableQualityLevels","getCurrentTime","getDuration","removeEventListener","getVideoUrl","getVideoEmbedCode","getOptions","getOption","addEventListener","destroy","setSize","getIframe"],e.exports=t.default},1062:(e,t,r)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0});var n="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},a=i(r(3988)),o=i(r(5900)),s=i(r(9125));function i(e){return e&&e.__esModule?e:{default:e}}var l=void 0;t.default=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},r=arguments.length>2&&void 0!==arguments[2]&&arguments[2],i=(0,a.default)();if(l||(l=(0,o.default)(i)),t.events)throw new Error("Event handlers cannot be overwritten.");if("string"==typeof e&&!document.getElementById(e))throw new Error('Element "'+e+'" does not exist.');t.events=s.default.proxyEvents(i);var u=new Promise((function(r){"object"===(void 0===e?"undefined":n(e))&&e.playVideo instanceof Function?r(e):l.then((function(n){var a=new n.Player(e,t);return i.on("ready",(function(){r(a)})),null}))})),c=s.default.promisifyPlayer(u,r);return c.on=i.on,c.off=i.off,c},e.exports=t.default},5900:(e,t,r)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0});var n,a=r(9090),o=(n=a)&&n.__esModule?n:{default:n};t.default=function(e){return new 
Promise((function(t){if(window.YT&&window.YT.Player&&window.YT.Player instanceof Function)t(window.YT);else{var r="http:"===window.location.protocol?"http:":"https:";(0,o.default)(r+"//www.youtube.com/iframe_api",(function(t){t&&e.trigger("error",t)}));var n=window.onYouTubeIframeAPIReady;window.onYouTubeIframeAPIReady=function(){n&&n(),t(window.YT)}}}))},e.exports=t.default},9215:(e,t,r)=>{function n(){var e;try{e=t.storage.debug}catch(r){}return!e&&"undefined"!=typeof process&&"env"in process&&(e=process.env.DEBUG),e}(t=e.exports=r(5046)).log=function(){return"object"==typeof console&&console.log&&Function.prototype.apply.call(console.log,console,arguments)},t.formatArgs=function(e){var r=this.useColors;if(e[0]=(r?"%c":"")+this.namespace+(r?" %c":" ")+e[0]+(r?"%c ":" ")+"+"+t.humanize(this.diff),!r)return;var n="color: "+this.color;e.splice(1,0,n,"color: inherit");var a=0,o=0;e[0].replace(/%[a-zA-Z%]/g,(function(e){"%%"!==e&&(a++,"%c"===e&&(o=a))})),e.splice(o,0,n)},t.save=function(e){try{null==e?t.storage.removeItem("debug"):t.storage.debug=e}catch(r){}},t.load=n,t.useColors=function(){if("undefined"!=typeof window&&window.process&&"renderer"===window.process.type)return!0;return"undefined"!=typeof document&&document.documentElement&&document.documentElement.style&&document.documentElement.style.WebkitAppearance||"undefined"!=typeof window&&window.console&&(window.console.firebug||window.console.exception&&window.console.table)||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)&&parseInt(RegExp.$1,10)>=31||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)},t.storage="undefined"!=typeof chrome&&void 0!==chrome.storage?chrome.storage.local:function(){try{return window.localStorage}catch(e){}}(),t.colors=["lightseagreen","forestgreen","goldenrod","dodgerblue","darkorchid","crimson"],t.formatters.j=function(e){try{return 
JSON.stringify(e)}catch(t){return"[UnexpectedJSONParseError]: "+t.message}},t.enable(n())},5046:(e,t,r)=>{var n;function a(e){function r(){if(r.enabled){var e=r,a=+new Date,o=a-(n||a);e.diff=o,e.prev=n,e.curr=a,n=a;for(var s=new Array(arguments.length),i=0;i<s.length;i++)s[i]=arguments[i];s[0]=t.coerce(s[0]),"string"!=typeof s[0]&&s.unshift("%O");var l=0;s[0]=s[0].replace(/%([a-zA-Z%])/g,(function(r,n){if("%%"===r)return r;l++;var a=t.formatters[n];if("function"==typeof a){var o=s[l];r=a.call(e,o),s.splice(l,1),l--}return r})),t.formatArgs.call(e,s),(r.log||t.log||console.log.bind(console)).apply(e,s)}}return r.namespace=e,r.enabled=t.enabled(e),r.useColors=t.useColors(),r.color=function(e){var r,n=0;for(r in e)n=(n<<5)-n+e.charCodeAt(r),n|=0;return t.colors[Math.abs(n)%t.colors.length]}(e),"function"==typeof t.init&&t.init(r),r}(t=e.exports=a.debug=a.default=a).coerce=function(e){return e instanceof Error?e.stack||e.message:e},t.disable=function(){t.enable("")},t.enable=function(e){t.save(e),t.names=[],t.skips=[];for(var r=("string"==typeof e?e:"").split(/[\s,]+/),n=r.length,a=0;a<n;a++)r[a]&&("-"===(e=r[a].replace(/\*/g,".*?"))[0]?t.skips.push(new RegExp("^"+e.substr(1)+"$")):t.names.push(new RegExp("^"+e+"$")))},t.enabled=function(e){var r,n;for(r=0,n=t.skips.length;r<n;r++)if(t.skips[r].test(e))return!1;for(r=0,n=t.names.length;r<n;r++)if(t.names[r].test(e))return!0;return!1},t.humanize=r(4680),t.names=[],t.skips=[],t.formatters={}},4680:e=>{var t=1e3,r=60*t,n=60*r,a=24*n,o=365.25*a;function s(e,t,r){if(!(e<t))return e<1.5*t?Math.floor(e/t)+" "+r:Math.ceil(e/t)+" "+r+"s"}e.exports=function(e,i){i=i||{};var l,u=typeof e;if("string"===u&&e.length>0)return function(e){if((e=String(e)).length>100)return;var s=/^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(e);if(!s)return;var i=parseFloat(s[1]);switch((s[2]||"ms").toLowerCase()){case"years":case"year":case"yrs":case"yr":case"y":return 
i*o;case"days":case"day":case"d":return i*a;case"hours":case"hour":case"hrs":case"hr":case"h":return i*n;case"minutes":case"minute":case"mins":case"min":case"m":return i*r;case"seconds":case"second":case"secs":case"sec":case"s":return i*t;case"milliseconds":case"millisecond":case"msecs":case"msec":case"ms":return i;default:return}}(e);if("number"===u&&!1===isNaN(e))return i.long?s(l=e,a,"day")||s(l,n,"hour")||s(l,r,"minute")||s(l,t,"second")||l+" ms":function(e){if(e>=a)return Math.round(e/a)+"d";if(e>=n)return Math.round(e/n)+"h";if(e>=r)return Math.round(e/r)+"m";if(e>=t)return Math.round(e/t)+"s";return e+"ms"}(e);throw new Error("val is not a non-empty string or a valid number. val="+JSON.stringify(e))}}}]); \ No newline at end of file diff --git a/assets/js/980c25d7.6573b859.js.LICENSE.txt b/assets/js/980c25d7.6573b859.js.LICENSE.txt new file mode 100644 index 0000000..1786123 --- /dev/null +++ b/assets/js/980c25d7.6573b859.js.LICENSE.txt @@ -0,0 +1,4 @@ +/** +* @link https://github.com/gajus/sister for the canonical source repository +* @license https://github.com/gajus/sister/blob/master/LICENSE BSD 3-Clause +*/ diff --git a/assets/js/982d0b04.f756f754.js b/assets/js/982d0b04.f756f754.js new file mode 100644 index 0000000..f363440 --- /dev/null +++ b/assets/js/982d0b04.f756f754.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8468],{3905:(e,t,a)=>{a.d(t,{Zo:()=>s,kt:()=>m});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function c(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var l=n.createContext({}),p=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):c(c({},t),e)),a},s=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,l=e.parentName,s=i(e,["components","mdxType","originalType","parentName"]),d=p(a),f=r,m=d["".concat(l,".").concat(f)]||d[f]||u[f]||o;return a?n.createElement(m,c(c({ref:t},s),{},{components:a})):n.createElement(m,c({ref:t},s))}));function m(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,c=new Array(o);c[0]=f;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,c[1]=i;for(var p=2;p<o;p++)c[p]=a[p];return n.createElement.apply(null,c)}return n.createElement.apply(null,a)}f.displayName="MDXCreateElement"},3331:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>c,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},c=void 0,i={unversionedId:"api/fastkafka/encoder/avsc_to_pydantic",id:"api/fastkafka/encoder/avsc_to_pydantic",title:"avsc_to_pydantic",description:"avsctopydantic 
{fastkafka.encoder.avsctopydantic}",source:"@site/docs/api/fastkafka/encoder/avsc_to_pydantic.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avsc_to_pydantic",permalink:"/docs/next/api/fastkafka/encoder/avsc_to_pydantic",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_encoder",permalink:"/docs/next/api/fastkafka/encoder/avro_encoder"},next:{title:"json_decoder",permalink:"/docs/next/api/fastkafka/encoder/json_decoder"}},l={},p=[{value:"avsc_to_pydantic",id:"fastkafka.encoder.avsc_to_pydantic",level:3}],s={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h3",{id:"fastkafka.encoder.avsc_to_pydantic"},"avsc_to_pydantic"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/encoder/avro.py#L283-L403",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"avsc_to_pydantic(\n schema\n)\n")),(0,r.kt)("p",null,"Generate pydantic model from given Avro Schema"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"schema")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Dict[str, Any]")),(0,r.kt)("td",{parentName:"tr",align:null},"Avro schema in dictionary 
format"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Type[pydantic.main.BaseModel]")),(0,r.kt)("td",{parentName:"tr",align:null},"Pydantic model class built from given avro schema")))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/9980ea0e.b9be0f2e.js b/assets/js/9980ea0e.b9be0f2e.js new file mode 100644 index 0000000..0977484 --- /dev/null +++ b/assets/js/9980ea0e.b9be0f2e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1506],{3905:(e,t,a)=>{a.d(t,{Zo:()=>f,kt:()=>d});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},f=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},u="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,p=e.parentName,f=s(e,["components","mdxType","originalType","parentName"]),u=l(a),c=o,d=u["".concat(p,".").concat(c)]||u[c]||k[c]||r;return a?n.createElement(d,i(i({ref:t},f),{},{components:a})):n.createElement(d,i({ref:t},f))}));function d(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=c;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[u]="string"==typeof e?e:o,i[1]=s;for(var l=2;l<r;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}c.displayName="MDXCreateElement"},1339:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>k,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var n=a(7462),o=(a(7294),a(3905));const r={},i="Deploy FastKafka docs to GitHub Pages",s={unversionedId:"guides/Guide_04_Github_Actions_Workflow",id:"version-0.7.1/guides/Guide_04_Github_Actions_Workflow",title:"Deploy FastKafka docs to GitHub Pages",description:"Getting started",source:"@site/versioned_docs/version-0.7.1/guides/Guide_04_Github_Actions_Workflow.md",sourceDirName:"guides",slug:"/guides/Guide_04_Github_Actions_Workflow",permalink:"/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using Redpanda to test FastKafka",permalink:"/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka"},next:{title:"Deploying 
FastKafka using Docker",permalink:"/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka"}},p={},l=[{value:"Getting started",id:"getting-started",level:2},{value:"Options",id:"options",level:2},{value:"Set app location",id:"set-app-location",level:3},{value:"Example Repository",id:"example-repository",level:2}],f={toc:l},u="wrapper";function k(e){let{components:t,...a}=e;return(0,o.kt)(u,(0,n.Z)({},f,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"deploy-fastkafka-docs-to-github-pages"},"Deploy FastKafka docs to GitHub Pages"),(0,o.kt)("h2",{id:"getting-started"},"Getting started"),(0,o.kt)("p",null,"Add your workflow file ",(0,o.kt)("inlineCode",{parentName:"p"},".github/workflows/fastkafka_docs_deploy.yml")," and\npush it to your remote default branch."),(0,o.kt)("p",null,"Here is an example workflow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'name: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n push:\n branches: [ "main", "master" ]\n workflow_dispatch:\n\njobs:\n deploy:\n runs-on: ubuntu-latest\n permissions:\n contents: write\n steps:\n - uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("h2",{id:"options"},"Options"),(0,o.kt)("h3",{id:"set-app-location"},"Set app location"),(0,o.kt)("p",null,"Input in the form of ",(0,o.kt)("inlineCode",{parentName:"p"},"path:app"),", where ",(0,o.kt)("inlineCode",{parentName:"p"},"path")," is the path to a Python\nfile and ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," is an object of type\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'- name: Deploy\n uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("p",null,"In the above 
example,\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is named as ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_app")," and it is available in the ",(0,o.kt)("inlineCode",{parentName:"p"},"application"),"\nsubmodule of the ",(0,o.kt)("inlineCode",{parentName:"p"},"test_fastkafka")," module."),(0,o.kt)("h2",{id:"example-repository"},"Example Repository"),(0,o.kt)("p",null,"A\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\nlibrary that uses the above-mentioned workfow actions to publish\nFastKafka docs to ",(0,o.kt)("inlineCode",{parentName:"p"},"Github Pages")," can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml"},"here"),"."))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/99912bf6.1a9c1ee9.js b/assets/js/99912bf6.1a9c1ee9.js new file mode 100644 index 0000000..ec15fbe --- /dev/null +++ b/assets/js/99912bf6.1a9c1ee9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9469],{3905:(e,t,a)=>{a.d(t,{Zo:()=>f,kt:()=>d});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},f=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},u="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,p=e.parentName,f=s(e,["components","mdxType","originalType","parentName"]),u=l(a),c=o,d=u["".concat(p,".").concat(c)]||u[c]||k[c]||r;return a?n.createElement(d,i(i({ref:t},f),{},{components:a})):n.createElement(d,i({ref:t},f))}));function d(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=c;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[u]="string"==typeof e?e:o,i[1]=s;for(var l=2;l<r;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}c.displayName="MDXCreateElement"},9036:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>k,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var n=a(7462),o=(a(7294),a(3905));const r={},i="Deploy FastKafka docs to GitHub 
Pages",s={unversionedId:"guides/Guide_04_Github_Actions_Workflow",id:"version-0.6.0/guides/Guide_04_Github_Actions_Workflow",title:"Deploy FastKafka docs to GitHub Pages",description:"Getting started",source:"@site/versioned_docs/version-0.6.0/guides/Guide_04_Github_Actions_Workflow.md",sourceDirName:"guides",slug:"/guides/Guide_04_Github_Actions_Workflow",permalink:"/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using Redpanda to test FastKafka",permalink:"/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka"},next:{title:"Deploying FastKafka using Docker",permalink:"/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka"}},p={},l=[{value:"Getting started",id:"getting-started",level:2},{value:"Options",id:"options",level:2},{value:"Set app location",id:"set-app-location",level:3},{value:"Example Repository",id:"example-repository",level:2}],f={toc:l},u="wrapper";function k(e){let{components:t,...a}=e;return(0,o.kt)(u,(0,n.Z)({},f,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"deploy-fastkafka-docs-to-github-pages"},"Deploy FastKafka docs to GitHub Pages"),(0,o.kt)("h2",{id:"getting-started"},"Getting started"),(0,o.kt)("p",null,"Add your workflow file ",(0,o.kt)("inlineCode",{parentName:"p"},".github/workflows/fastkafka_docs_deploy.yml")," and\npush it to your remote default branch."),(0,o.kt)("p",null,"Here is an example workflow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'name: Deploy FastKafka Generated Documentation to GitHub Pages\n\non:\n push:\n branches: [ "main", "master" ]\n workflow_dispatch:\n\njobs:\n deploy:\n runs-on: ubuntu-latest\n permissions:\n contents: write\n steps:\n - uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("h2",{id:"options"},"Options"),(0,o.kt)("h3",{id:"set-app-location"},"Set app 
location"),(0,o.kt)("p",null,"Input in the form of ",(0,o.kt)("inlineCode",{parentName:"p"},"path:app"),", where ",(0,o.kt)("inlineCode",{parentName:"p"},"path")," is the path to a Python\nfile and ",(0,o.kt)("inlineCode",{parentName:"p"},"app")," is an object of type\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'- name: Deploy\n uses: airtai/workflows/fastkafka-ghp@main\n with:\n app: "test_fastkafka.application:kafka_app"\n')),(0,o.kt)("p",null,"In the above example,\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is named as ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_app")," and it is available in the ",(0,o.kt)("inlineCode",{parentName:"p"},"application"),"\nsubmodule of the ",(0,o.kt)("inlineCode",{parentName:"p"},"test_fastkafka")," module."),(0,o.kt)("h2",{id:"example-repository"},"Example Repository"),(0,o.kt)("p",null,"A\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\nlibrary that uses the above-mentioned workfow actions to publish\nFastKafka docs to ",(0,o.kt)("inlineCode",{parentName:"p"},"Github Pages")," can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/blob/main/.github/workflows/fastkafka_deploy.yaml"},"here"),"."))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/99bfca7e.4ea0922d.js b/assets/js/99bfca7e.4ea0922d.js new file mode 100644 index 0000000..ead9528 --- /dev/null +++ b/assets/js/99bfca7e.4ea0922d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4172],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>m});var a=n(7294);function r(e,t,n){return t 
in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){r(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function i(e,t){if(null==e)return{};var n,a,r=function(e,t){if(null==e)return{};var n,a,r={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=a.createContext({}),p=function(e){var t=a.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=p(e.components);return a.createElement(c.Provider,{value:t},e.children)},s="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},k=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,c=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),s=p(n),k=r,m=s["".concat(c,".").concat(k)]||s[k]||u[k]||o;return n?a.createElement(m,l(l({ref:t},d),{},{components:n})):a.createElement(m,l({ref:t},d))}));function m(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=k;var i={};for(var c in t)hasOwnProperty.call(t,c)&&(i[c]=t[c]);i.originalType=e,i[s]="string"==typeof e?e:r,l[1]=i;for(var p=2;p<o;p++)l[p]=n[p];return a.createElement.apply(null,l)}return 
a.createElement.apply(null,n)}k.displayName="MDXCreateElement"},129:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var a=n(7462),r=(n(7294),n(3905));const o={},l=void 0,i={unversionedId:"api/fastkafka/encoder/json_decoder",id:"api/fastkafka/encoder/json_decoder",title:"json_decoder",description:"jsondecoder {fastkafka.encoder.jsondecoder}",source:"@site/docs/api/fastkafka/encoder/json_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_decoder",permalink:"/docs/next/api/fastkafka/encoder/json_decoder",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avsc_to_pydantic",permalink:"/docs/next/api/fastkafka/encoder/avsc_to_pydantic"},next:{title:"json_encoder",permalink:"/docs/next/api/fastkafka/encoder/json_encoder"}},c={},p=[{value:"json_decoder",id:"fastkafka.encoder.json_decoder",level:3}],d={toc:p},s="wrapper";function u(e){let{components:t,...n}=e;return(0,r.kt)(s,(0,a.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h3",{id:"fastkafka.encoder.json_decoder"},"json_decoder"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/encoder/json.py#L42-L55",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"json_decoder(\n raw_msg, cls\n)\n")),(0,r.kt)("p",null,"Decoder to decode json string in bytes to pydantic model 
instance"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"raw_msg")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bytes")),(0,r.kt)("td",{parentName:"tr",align:null},"Bytes message received from Kafka topic"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"cls")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Type[pydantic.main.BaseModel]")),(0,r.kt)("td",{parentName:"tr",align:null},"Pydantic class; This pydantic class will be used to construct instance of same class"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Any")),(0,r.kt)("td",{parentName:"tr",align:null},"An instance of given pydantic class")))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/99d969f2.d89c6b4c.js b/assets/js/99d969f2.d89c6b4c.js new file mode 100644 index 0000000..dce41ed --- 
/dev/null +++ b/assets/js/99d969f2.d89c6b4c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6005],{3905:(a,e,t)=>{t.d(e,{Zo:()=>f,kt:()=>d});var n=t(7294);function i(a,e,t){return e in a?Object.defineProperty(a,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):a[e]=t,a}function s(a,e){var t=Object.keys(a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(a);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),t.push.apply(t,n)}return t}function o(a){for(var e=1;e<arguments.length;e++){var t=null!=arguments[e]?arguments[e]:{};e%2?s(Object(t),!0).forEach((function(e){i(a,e,t[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(t,e))}))}return a}function r(a,e){if(null==a)return{};var t,n,i=function(a,e){if(null==a)return{};var t,n,i={},s=Object.keys(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||(i[t]=a[t]);return i}(a,e);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(a,t)&&(i[t]=a[t])}return i}var p=n.createContext({}),l=function(a){var e=n.useContext(p),t=e;return a&&(t="function"==typeof a?a(e):o(o({},e),a)),t},f=function(a){var e=l(a.components);return n.createElement(p.Provider,{value:e},a.children)},k="mdxType",c={inlineCode:"code",wrapper:function(a){var e=a.children;return n.createElement(n.Fragment,{},e)}},u=n.forwardRef((function(a,e){var t=a.components,i=a.mdxType,s=a.originalType,p=a.parentName,f=r(a,["components","mdxType","originalType","parentName"]),k=l(t),u=i,d=k["".concat(p,".").concat(u)]||k[u]||c[u]||s;return t?n.createElement(d,o(o({ref:e},f),{},{components:t})):n.createElement(d,o({ref:e},f))}));function d(a,e){var t=arguments,i=e&&e.mdxType;if("string"==typeof a||i){var 
s=t.length,o=new Array(s);o[0]=u;var r={};for(var p in e)hasOwnProperty.call(e,p)&&(r[p]=e[p]);r.originalType=a,r[k]="string"==typeof a?a:i,o[1]=r;for(var l=2;l<s;l++)o[l]=t[l];return n.createElement.apply(null,o)}return n.createElement.apply(null,t)}u.displayName="MDXCreateElement"},9515:(a,e,t)=>{t.r(e),t.d(e,{assets:()=>p,contentTitle:()=>o,default:()=>c,frontMatter:()=>s,metadata:()=>r,toc:()=>l});var n=t(7462),i=(t(7294),t(3905));const s={},o="Using FastAPI to Run FastKafka Application",r={unversionedId:"guides/Guide_32_Using_fastapi_to_run_fastkafka_application",id:"version-0.8.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",title:"Using FastAPI to Run FastKafka Application",description:"When deploying a FastKafka application, the default approach is to",source:"@site/versioned_docs/version-0.8.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",sourceDirName:"guides",slug:"/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",permalink:"/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploying FastKafka using Docker",permalink:"/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka"},next:{title:"Benchmarking FastKafka app",permalink:"/docs/guides/Guide_06_Benchmarking_FastKafka"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"1. Basic FastKafka app",id:"1-basic-fastkafka-app",level:2},{value:"2. 
Using fastapi_lifespan method",id:"2-using-fastapi_lifespan-method",level:2},{value:"Putting it all together",id:"putting-it-all-together",level:2}],f={toc:l},k="wrapper";function c(a){let{components:e,...t}=a;return(0,i.kt)(k,(0,n.Z)({},f,t,{components:e,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-fastapi-to-run-fastkafka-application"},"Using FastAPI to Run FastKafka Application"),(0,i.kt)("p",null,"When deploying a FastKafka application, the default approach is to\nutilize the ",(0,i.kt)("a",{parentName:"p",href:"/docs/cli/fastkafka#fastkafka-run"},(0,i.kt)("inlineCode",{parentName:"a"},"fastkafka run"))," CLI\ncommand. This command allows you to launch your FastKafka application as\na standalone service. However, if you already have a FastAPI application\nin place and wish to run FastKafka application alongside it, you have an\nalternative option."),(0,i.kt)("p",null,"FastKafka provides a method called\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nthat leverages ",(0,i.kt)("a",{parentName:"p",href:"https://fastapi.tiangolo.com/advanced/events/#lifespan-events"},"FastAPI\u2019s\nlifespan"),"\nfeature. This method allows you to run your FastKafka application\ntogether with your existing FastAPI app, seamlessly integrating their\nfunctionalities. 
By using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod, you can start the FastKafka application within the same process\nas the FastAPI app."),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod ensures that both FastAPI and FastKafka are initialized and start\nworking simultaneously. This approach enables the execution of\nKafka-related tasks, such as producing and consuming messages, while\nalso handling HTTP requests through FastAPI\u2019s routes."),(0,i.kt)("p",null,"By combining FastAPI and FastKafka in this manner, you can build a\ncomprehensive application that harnesses the power of both frameworks.\nWhether you require real-time messaging capabilities or traditional HTTP\nendpoints, this approach allows you to leverage the strengths of FastAPI\nand FastKafka within a single deployment setup."),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A basic knowledge of\n",(0,i.kt)("a",{parentName:"li",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nis needed to proceed with this guide. 
If you are not familiar with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),",\nplease go through the ",(0,i.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("a",{parentName:"li",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nand ",(0,i.kt)("inlineCode",{parentName:"li"},"FastAPI")," libraries needs to be installed.")),(0,i.kt)("p",null,"This guide will provide a step-by-step explanation, taking you through\neach stage individually, before combining all the components in the\nfinal section for a comprehensive understanding of the process."),(0,i.kt)("h2",{id:"1-basic-fastkafka-app"},"1. Basic FastKafka app"),(0,i.kt)("p",null,"In this step, we will begin by creating a simple FastKafka application."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Greetings",\n kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n return greeting\n')),(0,i.kt)("p",null,"In the above example, we consume messages from a topic called ",(0,i.kt)("inlineCode",{parentName:"p"},"names"),',\nwe prepend \u201cHello" to the message, and send it back to another topic\ncalled 
',(0,i.kt)("inlineCode",{parentName:"p"},"greetings"),"."),(0,i.kt)("p",null,"We now have a simple\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp to produce and consume from two topics."),(0,i.kt)("h2",{id:"2-using-fastapi_lifespan-method"},"2. Using fastapi_lifespan method"),(0,i.kt)("p",null,"In this step of the guide, we will explore the integration of a\nFastKafka application with a FastAPI application using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod. The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka._application.app.FastKafka.fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka.fastapi_lifespan")),"\nmethod is a feature provided by FastKafka, which allows you to\nseamlessly integrate a FastKafka application with a FastAPI application\nby leveraging FastAPI\u2019s lifespan feature."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name="localhost"))\n\n\n@fastapi_app.get("/hello")\nasync def hello():\n return {"msg": "hello there"}\n')),(0,i.kt)("p",null,"In the above example, a new instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"FastAPI")," app is created,\nand when the app is started using uvicorn, it also runs the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napplication concurrently."),(0,i.kt)("h2",{id:"putting-it-all-together"},"Putting it all together"),(0,i.kt)("p",null,"Let\u2019s put the above code together and write it in a file 
called\n",(0,i.kt)("inlineCode",{parentName:"p"},"fast_apps.py"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "fast_apps.py" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\nfrom typing import *\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Greetings",\n kafka_brokers=kafka_brokers,\n)\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\n@kafka_app.consumes()\nasync def on_names(msg: TestMsg):\n await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))\n\n\n@kafka_app.produces()\nasync def to_greetings(greeting: TestMsg) -> TestMsg:\n return greeting\n\n\nfrom fastapi import FastAPI\n\nfastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan("localhost"))\n\n@fastapi_app.get("/hello")\nasync def hello():\n return {"msg": "hello there"}\n')),(0,i.kt)("p",null,"Finally, you can run the FastAPI application using a web server of your\nchoice, such as Uvicorn or Hypercorn by running the below command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"uvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080\n")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/9af63d42.bb0edfcf.js b/assets/js/9af63d42.bb0edfcf.js new file mode 100644 index 0000000..36209ee --- /dev/null +++ b/assets/js/9af63d42.bb0edfcf.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3814],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var 
a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var l=n.createContext({}),p=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},c=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=p(a),k=r,f=d["".concat(l,".").concat(k)]||d[k]||u[k]||o;return a?n.createElement(f,s(s({ref:t},c),{},{components:a})):n.createElement(f,s({ref:t},c))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,s[1]=i;for(var p=2;p<o;p++)s[p]=a[p];return n.createElement.apply(null,s)}return 
n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},709:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},s="Batch producing",i={unversionedId:"guides/Guide_23_Batch_Producing",id:"version-0.7.1/guides/Guide_23_Batch_Producing",title:"Batch producing",description:"If you want to send your data in batches @produces decorator makes",source:"@site/versioned_docs/version-0.7.1/guides/Guide_23_Batch_Producing.md",sourceDirName:"guides",slug:"/guides/Guide_23_Batch_Producing",permalink:"/docs/0.7.1/guides/Guide_23_Batch_Producing",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Defining a partition key",permalink:"/docs/0.7.1/guides/Guide_22_Partition_Keys"},next:{title:"Lifespan Events",permalink:"/docs/0.7.1/guides/Guide_05_Lifespan_Handler"}},l={},p=[{value:"Return a batch from the producing function",id:"return-a-batch-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the batch was sent to the Kafka topic with the defined key",id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key",level:2},{value:"Batch key",id:"batch-key",level:2},{value:"Check if the batch was sent to the Kafka topic",id:"check-if-the-batch-was-sent-to-the-kafka-topic",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"batch-producing"},"Batch producing"),(0,r.kt)("p",null,"If you want to send your data in batches ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator makes\nthat possible for you. 
By returning a ",(0,r.kt)("inlineCode",{parentName:"p"},"list")," of messages you want to\nsend in a batch the producer will collect the messages and send them in\na batch to a Kafka broker."),(0,r.kt)("p",null,"This guide will demonstrate how to use this feature."),(0,r.kt)("h2",{id:"return-a-batch-from-the-producing-function"},"Return a batch from the producing function"),(0,r.kt)("p",null,"To define a batch that you want to produce to Kafka topic, you need to\nreturn the ",(0,r.kt)("inlineCode",{parentName:"p"},"List")," of the messages that you want to be batched from your\nproducing function."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n")),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class batch\nthat is created from a list of msgs we passed into our producing\nfunction."),(0,r.kt)("p",null,'Lets also prepare a backgound task that will send a batch of \u201chello\nworld" messages when the app starts.'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n')),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_21_Produces_Basics"},"@producer\nbasics")," guide to return the\n",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," batch. 
The final app will look like this (make sure you\nreplace the ",(0,r.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\n[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key"},"Check if the batch was sent to the Kafka topic with the defined key"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the batch of messages in your topic."),(0,r.kt)("h2",{id:"batch-key"},"Batch key"),(0,r.kt)("p",null,"To define a key for your batch like in ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a partition\nkey")," guide you can wrap the\nreturning value in a\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass. To learn more about defining a partition ke and\n",(0,r.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/KafkaEvent/#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass, please, have a look at ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a partition\nkey")," guide."),(0,r.kt)("p",null,"Let\u2019s demonstrate that."),(0,r.kt)("p",null,"To define a key, we just need to modify our producing function, like\nthis:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("p",null,"Now our app looks like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n 
"description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic"},"Check if the batch was sent to the Kafka topic"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic, containing a defined key. In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the batch of messages with the defined key in your topic."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/9defa5b7.dbb12a65.js b/assets/js/9defa5b7.dbb12a65.js new file mode 100644 index 0000000..669db8b --- /dev/null +++ b/assets/js/9defa5b7.dbb12a65.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5955],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?a(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),u=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(l.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(n),k=o,d=p["".concat(l,".").concat(k)]||p[k]||f[k]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var u=2;u<a;u++)i[u]=n[u];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}k.displayName="MDXCreateElement"},993:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const a={},i="Intro",s={unversionedId:"guides/Guide_01_Intro",id:"version-0.7.1/guides/Guide_01_Intro",title:"Intro",description:"This tutorial will show you how to use 
FastKafkaAPI, step by",source:"@site/versioned_docs/version-0.7.1/guides/Guide_01_Intro.md",sourceDirName:"guides",slug:"/guides/Guide_01_Intro",permalink:"/docs/0.7.1/guides/Guide_01_Intro",draft:!1,tags:[],version:"0.7.1",frontMatter:{}},l={},u=[{value:"Installing FastKafkaAPI",id:"installing-fastkafkaapi",level:2},{value:"Preparing a Kafka broker",id:"preparing-a-kafka-broker",level:2},{value:"Running the code",id:"running-the-code",level:2}],c={toc:u},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"intro"},"Intro"),(0,o.kt)("p",null,"This tutorial will show you how to use ",(0,o.kt)("b",null,"FastKafkaAPI"),", step by\nstep."),(0,o.kt)("p",null,"The goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel."),(0,o.kt)("p",null,"In this Intro tutorial we\u2019ll go trough the basic requirements to run the\ndemos presented in future steps."),(0,o.kt)("h2",{id:"installing-fastkafkaapi"},"Installing FastKafkaAPI"),(0,o.kt)("p",null,"First step is to install FastKafkaAPI"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ pip install fastkafka\n")),(0,o.kt)("h2",{id:"preparing-a-kafka-broker"},"Preparing a Kafka broker"),(0,o.kt)("p",null,"Next step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication."),(0,o.kt)("p",null,'!!! info "Hey, your first info!"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n")),(0,o.kt)("p",null,"To go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times)."),(0,o.kt)("p",null,'!!! warning "Listen! 
This is important."'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'To be able to setup this configuration you need to have Docker and docker-compose installed\n\nSee here for more info on <a href = \\"https://docs.docker.com/\\" target=\\"_blank\\">Docker</a> and <a href = \\"https://docs.docker.com/compose/install/\\" target=\\"_blank\\">docker compose</a>\n')),(0,o.kt)("p",null,"To setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g.\xa0fastkafka_demo). Inside the new\nfolder create a new YAML file named ",(0,o.kt)("b",null,"kafka_demo.yml")," and copy the\nfollowing configuration into it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'version: "3"\nservices:\n zookeeper:\n image: wurstmeister/zookeeper\n hostname: zookeeper\n container_name: zookeeper\n networks:\n - fastkafka-network\n ports:\n - "2181:2181"\n - "22:22"\n - "2888:2888"\n - "3888:3888"\n kafka:\n image: wurstmeister/kafka\n container_name: kafka\n ports:\n - "9093:9093"\n environment:\n HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d\' \' -f 2"\n KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"\n KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n KAFKA_CREATE_TOPICS: "hello:1:1"\n volumes:\n - /var/run/docker.sock:/var/run/docker.sock\n depends_on:\n - zookeeper\n healthcheck:\n test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]\n interval: 5s\n timeout: 10s\n retries: 5\n networks:\n - fastkafka-network\nnetworks:\n fastkafka-network:\n name: "fastkafka-network"\n')),(0,o.kt)("p",null,"This configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a \u2018hello\u2019 topic (quite enough for a\nstart). 
To start the configuration, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ docker-compose -f kafka_demo.yaml up -d --wait\n")),(0,o.kt)("p",null,"This will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! \ud83c\udf8a"),(0,o.kt)("h2",{id:"running-the-code"},"Running the code"),(0,o.kt)("p",null,"After installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the \u2018First Steps\u2019 part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem."),(0,o.kt)("p",null,"You are highly encouraged to follow along the tutorials not just by\nreading trough them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/9fc8d1d9.4bcec99f.js b/assets/js/9fc8d1d9.4bcec99f.js new file mode 100644 index 0000000..8149c1c --- /dev/null +++ b/assets/js/9fc8d1d9.4bcec99f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1673],{3905:(e,a,o)=>{o.d(a,{Zo:()=>c,kt:()=>f});var s=o(7294);function t(e,a,o){return a in e?Object.defineProperty(e,a,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[a]=o,e}function n(e,a){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);a&&(s=s.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),o.push.apply(o,s)}return o}function r(e){for(var a=1;a<arguments.length;a++){var 
o=null!=arguments[a]?arguments[a]:{};a%2?n(Object(o),!0).forEach((function(a){t(e,a,o[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(o)):n(Object(o)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(o,a))}))}return e}function i(e,a){if(null==e)return{};var o,s,t=function(e,a){if(null==e)return{};var o,s,t={},n=Object.keys(e);for(s=0;s<n.length;s++)o=n[s],a.indexOf(o)>=0||(t[o]=e[o]);return t}(e,a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);for(s=0;s<n.length;s++)o=n[s],a.indexOf(o)>=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(t[o]=e[o])}return t}var p=s.createContext({}),k=function(e){var a=s.useContext(p),o=a;return e&&(o="function"==typeof e?e(a):r(r({},a),e)),o},c=function(e){var a=k(e.components);return s.createElement(p.Provider,{value:a},e.children)},_="mdxType",l={inlineCode:"code",wrapper:function(e){var a=e.children;return s.createElement(s.Fragment,{},a)}},m=s.forwardRef((function(e,a){var o=e.components,t=e.mdxType,n=e.originalType,p=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),_=k(o),m=t,f=_["".concat(p,".").concat(m)]||_[m]||l[m]||n;return o?s.createElement(f,r(r({ref:a},c),{},{components:o})):s.createElement(f,r({ref:a},c))}));function f(e,a){var o=arguments,t=a&&a.mdxType;if("string"==typeof e||t){var n=o.length,r=new Array(n);r[0]=m;var i={};for(var p in a)hasOwnProperty.call(a,p)&&(i[p]=a[p]);i.originalType=e,i[_]="string"==typeof e?e:t,r[1]=i;for(var k=2;k<n;k++)r[k]=o[k];return s.createElement.apply(null,r)}return s.createElement.apply(null,o)}m.displayName="MDXCreateElement"},4941:(e,a,o)=>{o.r(a),o.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>l,frontMatter:()=>n,metadata:()=>i,toc:()=>k});var s=o(7462),t=(o(7294),o(3905));const n={},r="Using multiple Kafka 
clusters",i={unversionedId:"guides/Guide_24_Using_Multiple_Kafka_Clusters",id:"version-0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters",title:"Using multiple Kafka clusters",description:"Ready to take your FastKafka app to the next level? This guide shows you",source:"@site/versioned_docs/version-0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",sourceDirName:"guides",slug:"/guides/Guide_24_Using_Multiple_Kafka_Clusters",permalink:"/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},next:{title:"Using Redpanda to test FastKafka",permalink:"/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka"}},p={},k=[{value:"Test message",id:"test-message",level:3},{value:"Defining multiple broker configurations",id:"defining-multiple-broker-configurations",level:2},{value:"How it works",id:"how-it-works",level:4},{value:"Testing the application",id:"testing-the-application",level:2},{value:"Running the application",id:"running-the-application",level:2},{value:"Application documentation",id:"application-documentation",level:2},{value:"Examples on how to use multiple broker configurations",id:"examples-on-how-to-use-multiple-broker-configurations",level:2},{value:"Example #1",id:"example-1",level:3},{value:"Testing",id:"testing",level:4},{value:"Example #2",id:"example-2",level:3},{value:"Testing",id:"testing-1",level:4},{value:"Example #3",id:"example-3",level:3},{value:"Testing",id:"testing-2",level:4}],c={toc:k},_="wrapper";function l(e){let{components:a,...o}=e;return(0,t.kt)(_,(0,s.Z)({},c,o,{components:a,mdxType:"MDXLayout"}),(0,t.kt)("h1",{id:"using-multiple-kafka-clusters"},"Using multiple Kafka clusters"),(0,t.kt)("p",null,"Ready to take your FastKafka app to the next level? 
This guide shows you\nhow to connect to multiple Kafka clusters effortlessly. Consolidate\ntopics and produce messages across clusters like a pro. Unleash the full\npotential of your Kafka-powered app with FastKafka. Let\u2019s dive in and\nelevate your application\u2019s capabilities!"),(0,t.kt)("h3",{id:"test-message"},"Test message"),(0,t.kt)("p",null,"To showcase the functionalities of FastKafka and illustrate the concepts\ndiscussed, we can use a simple test message called ",(0,t.kt)("inlineCode",{parentName:"p"},"TestMsg"),". Here\u2019s the\ndefinition of the ",(0,t.kt)("inlineCode",{parentName:"p"},"TestMsg")," class:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},"class TestMsg(BaseModel):\n msg: str = Field(...)\n")),(0,t.kt)("h2",{id:"defining-multiple-broker-configurations"},"Defining multiple broker configurations"),(0,t.kt)("p",null,"When building a FastKafka application, you may need to consume messages\nfrom multiple Kafka clusters, each with its own set of broker\nconfigurations. 
FastKafka provides the flexibility to define different\nbroker clusters using the brokers argument in the consumes decorator.\nLet\u2019s explore an example code snippet"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(\n development=dict(url="dev.server_1", port=9092),\n production=dict(url="prod.server_1", port=9092),\n)\nkafka_brokers_2 = dict(\n development=dict(url="dev.server_2", port=9092),\n production=dict(url="prod.server_1", port=9092),\n)\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n print(f"Received on s1: {msg=}")\n await to_predictions_1(msg)\n\n\n@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n print(f"Received on s2: {msg=}")\n await to_predictions_2(msg)\n \n@app.produces(topic="predictions")\nasync def to_predictions_1(msg: TestMsg) -> TestMsg:\n return msg\n \n@app.produces(topic="predictions", brokers=kafka_brokers_2)\nasync def to_predictions_2(msg: TestMsg) -> TestMsg:\n return msg\n')),(0,t.kt)("p",null,"In this example, the application has two consumes endpoints, both of\nwhich will consume events from ",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals")," topic.\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_1")," will consume events from ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"\nconfiguration and ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2")," will consume events from\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," configuration. 
When producing, ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1")," will\nproduce to ",(0,t.kt)("inlineCode",{parentName:"p"},"predictions")," topic on ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," cluster and\n",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2")," will produce to ",(0,t.kt)("inlineCode",{parentName:"p"},"predictions")," topic on\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," cluster."),(0,t.kt)("h4",{id:"how-it-works"},"How it works"),(0,t.kt)("p",null,"The ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," configuration represents the primary cluster,\nwhile ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," serves as an alternative cluster specified in\nthe decorator."),(0,t.kt)("p",null,"Using the FastKafka class, the app object is initialized with the\nprimary broker configuration (",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"). By default, the\n",(0,t.kt)("inlineCode",{parentName:"p"},"@app.consumes")," decorator without the brokers argument consumes messages\nfrom the ",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals")," topic on ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"."),(0,t.kt)("p",null,"To consume messages from a different cluster, the ",(0,t.kt)("inlineCode",{parentName:"p"},"@app.consumes"),"\ndecorator includes the ",(0,t.kt)("inlineCode",{parentName:"p"},"brokers")," argument. 
This allows explicit\nspecification of the broker cluster in the ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2"),"\nfunction, enabling consumption from the same topic but using the\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," configuration."),(0,t.kt)("p",null,"The brokers argument can also be used in the @app.produces decorator to\ndefine multiple broker clusters for message production."),(0,t.kt)("p",null,"It\u2019s important to ensure that all broker configurations have the same\nrequired settings as the primary cluster to ensure consistent behavior."),(0,t.kt)("h2",{id:"testing-the-application"},"Testing the application"),(0,t.kt)("p",null,"To test our FastKafka \u2018mirroring\u2019 application, we can use our testing\nframework. Let\u2019s take a look at how it\u2019s done:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n # Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from\n await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal_s1"))\n # Assert on_preprocessed_signals_1 consumed sent message\n await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(\n TestMsg(msg="signal_s1"), timeout=5\n )\n # Assert app has produced a prediction\n await tester.mirrors[app.to_predictions_1].assert_called_with(\n TestMsg(msg="signal_s1"), timeout=5\n )\n\n # Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from\n await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal_s2"))\n # Assert on_preprocessed_signals_2 consumed sent message\n await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(\n TestMsg(msg="signal_s2"), timeout=5\n )\n # Assert app has produced a prediction\n await tester.mirrors[app.to_predictions_2].assert_called_with(\n TestMsg(msg="signal_s2"), timeout=5\n 
)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-05-30 10:33:08.721 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-05-30 10:33:08.721 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n23-05-30 10:33:08.722 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:08.722 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n23-05-30 10:33:08.723 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:08.741 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n23-05-30 10:33:08.741 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:08.742 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n23-05-30 10:33:08.743 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:08.744 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:08.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n23-05-30 10:33:08.746 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:08.747 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:08.749 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\n23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:08.755 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:08.756 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:08.756 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n23-05-30 10:33:08.758 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:08.758 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:33:08.759 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:08.760 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:08.761 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\n23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:08.762 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:08.763 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:33:08.763 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nReceived on s1: msg=TestMsg(msg='signal_s1')\nReceived on s2: msg=TestMsg(msg='signal_s2')\n23-05-30 10:33:13.745 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:13.747 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:13.747 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:13.748 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:13.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:13.751 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n23-05-30 10:33:13.754 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"The usage of the ",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors")," dictionary allows specifying the\ndesired topic/broker combination for sending the test messages,\nespecially when working with multiple Kafka clusters. 
This ensures that\nthe data is sent to the appropriate topic/broker based on the consuming\nfunction, and consumed from appropriate topic/broker based on the\nproducing function."),(0,t.kt)("h2",{id:"running-the-application"},"Running the application"),(0,t.kt)("p",null,"You can run your application using ",(0,t.kt)("inlineCode",{parentName:"p"},"fastkafka run")," CLI command in the\nsame way that you would run a single cluster app."),(0,t.kt)("p",null,"To start your app, copy the code above in multi_cluster_example.py and\nrun it by running:"),(0,t.kt)("p",null,"Now we can run the app. Copy the code above in multi_cluster_example.py,\nadjust your server configurations, and run it by running"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app\n")),(0,t.kt)("p",null,"In your app logs, you should see your app starting up and your two\nconsumer functions connecting to different kafka clusters."),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"[90735]: 23-05-30 10:33:29.699 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}\n[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:57647'}\n[90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[90735]: 23-05-30 10:33:29.714 
[INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n[90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n[90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n[90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n[90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[90735]: 23-05-30 10:33:29.722 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n[90735]: 23-05-30 10:33:29.723 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. 
\nStarting process cleanup, this may take a few seconds...\n23-05-30 10:33:33.548 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 90735...\n[90735]: 23-05-30 10:33:34.666 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:34.777 [INFO] fastkafka._server: terminate_asyncio_process(): Process 90735 terminated.\n")),(0,t.kt)("h2",{id:"application-documentation"},"Application documentation"),(0,t.kt)("p",null,"At the moment the documentation for multicluster app is not yet\nimplemented, but it is under development and you can expect it soon!"),(0,t.kt)("h2",{id:"examples-on-how-to-use-multiple-broker-configurations"},"Examples on how to use multiple broker configurations"),(0,t.kt)("h3",{id:"example-1"},"Example ","#","1"),(0,t.kt)("p",null,"In this section, we\u2019ll explore how you can effectively forward topics\nbetween different Kafka clusters, enabling seamless data synchronization\nfor your applications."),(0,t.kt)("p",null,"Imagine having two Kafka clusters, namely ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2"),", each hosting its own set of topics and messages. 
Now,\nif you want to forward a specific topic (in this case:\n",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals"),") from kafka_brokers_1 to kafka_brokers_2,\nFastKafka provides an elegant solution."),(0,t.kt)("p",null,"Let\u2019s examine the code snippet that configures our application for topic\nforwarding:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_original(msg: TestMsg):\n await to_preprocessed_signals_forward(msg)\n\n\n@app.produces(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:\n return data\n')),(0,t.kt)("p",null,"Here\u2019s how it works: our FastKafka application is configured to consume\nmessages from ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," and process them in the\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_original")," function. We want to forward these\nmessages to ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2"),". To achieve this, we define the\n",(0,t.kt)("inlineCode",{parentName:"p"},"to_preprocessed_signals_forward")," function as a producer, seamlessly\nproducing the processed messages to the preprocessed_signals topic\nwithin the ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," cluster."),(0,t.kt)("h4",{id:"testing"},"Testing"),(0,t.kt)("p",null,"To test our FastKafka forwarding application, we can use our testing\nframework. 
Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg="signal"))\n await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-05-30 10:33:40.969 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n23-05-30 10:33:40.970 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-05-30 10:33:40.971 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-05-30 10:33:40.972 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-05-30 10:33:40.972 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:40.982 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:33:40.982 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:40.983 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:40.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:33:40.984 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:40.985 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:40.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched 
subscribe() called\n23-05-30 10:33:40.986 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:40.987 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-05-30 10:33:40.989 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:40.989 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:40.991 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:44.983 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:44.986 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:44.987 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop() finished.\n23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n23-05-30 10:33:44.988 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"With the help of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," object, we can simulate and verify the\nbehavior of our FastKafka application. Here\u2019s how it works:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We create an instance of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," by passing in our ",(0,t.kt)("em",{parentName:"p"},"app"),"\nobject, which represents our FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the ",(0,t.kt)("strong",{parentName:"p"},"tester.mirrors")," dictionary, we can send a message to a\nspecific Kafka broker and topic combination. 
In this case, we use\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_original]"),' to send a\nTestMsg message with the content \u201csignal" to the appropriate Kafka\nbroker and topic.')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the message, we can perform assertions on the mirrored\nfunction using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)"),".\nThis assertion ensures that the mirrored function has been called\nwithin a specified timeout period (in this case, 5 seconds)."))),(0,t.kt)("h3",{id:"example-2"},"Example ","#","2"),(0,t.kt)("p",null,"In this section, we\u2019ll explore how you can effortlessly consume data\nfrom multiple sources, process it, and aggregate the results into a\nsingle topic on a specific cluster."),(0,t.kt)("p",null,"Imagine you have two Kafka clusters: ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2"),", each hosting its own set of topics and messages.\nNow, what if you want to consume data from both clusters, perform some\nprocessing, and produce the results to a single topic on\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1"),"? 
FastKafka has got you covered!"),(0,t.kt)("p",null,"Let\u2019s take a look at the code snippet that configures our application\nfor aggregating multiple clusters:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n print(f"Default: {msg=}")\n await to_predictions(msg)\n\n\n@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n print(f"Specified: {msg=}")\n await to_predictions(msg)\n\n\n@app.produces(topic="predictions")\nasync def to_predictions(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction: {prediction}")\n return [prediction]\n')),(0,t.kt)("p",null,'Here\u2019s the idea: our FastKafka application is set to consume messages\nfrom the topic \u201cpreprocessed_signals" on ',(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," cluster, as\nwell as from the same topic on ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2")," cluster. We have two\nconsuming functions, ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_1")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2"),", that handle the messages from their\nrespective clusters. These functions perform any required processing, in\nthis case, just calling the to_predictions function."),(0,t.kt)("p",null,'The exciting part is that the to_predictions function acts as a\nproducer, sending the processed results to the \u201cpredictions" topic on\n',(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1 cluster"),". 
By doing so, we effectively aggregate the\ndata from multiple sources into a single topic on a specific cluster."),(0,t.kt)("p",null,"This approach enables you to consume data from multiple Kafka clusters,\nprocess it, and produce the aggregated results to a designated topic.\nWhether you\u2019re generating predictions, performing aggregations, or any\nother form of data processing, FastKafka empowers you to harness the\nfull potential of multiple clusters."),(0,t.kt)("h4",{id:"testing-1"},"Testing"),(0,t.kt)("p",null,"Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal"))\n await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal"))\n await tester.on_predictions.assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-05-30 10:33:50.828 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-05-30 10:33:50.829 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:33:50.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:50.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:33:50.875 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:50.876 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: 
'{'bootstrap_servers': 'server_2:9092'}'\n23-05-30 10:33:50.876 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:33:50.877 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:50.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:50.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:50.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:50.880 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-05-30 10:33:50.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:50.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:50.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:50.883 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:33:50.883 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:33:50.884 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:33:50.885 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:33:50.885 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:33:50.886 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nDefault: msg=TestMsg(msg='signal')\nSending prediction: msg='signal'\nSpecified: msg=TestMsg(msg='signal')\nSending prediction: msg='signal'\n23-05-30 10:33:54.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:54.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:54.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:54.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:54.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:54.881 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:54.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:33:54.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:33:54.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"Here\u2019s how the code above works:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Within an ",(0,t.kt)("inlineCode",{parentName:"p"},"async with")," block, create an instance of the Tester by\npassing in your app object, representing your FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the tester.mirrors dictionary, you can send messages to\nspecific Kafka broker and topic combinations. In this case, we use\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_1]")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_2]"),' to send TestMsg\nmessages with the content \u201csignal" to the corresponding Kafka broker\nand topic combinations.')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the messages, you can perform assertions on the\n",(0,t.kt)("strong",{parentName:"p"},"on_predictions")," function using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.on_predictions.assert_called(timeout=5)"),". 
This assertion\nensures that the on_predictions function has been called within a\nspecified timeout period (in this case, 5 seconds)."))),(0,t.kt)("h3",{id:"example-3"},"Example ","#","3"),(0,t.kt)("p",null,"In some scenarios, you may need to produce messages to multiple Kafka\nclusters simultaneously. FastKafka simplifies this process by allowing\nyou to configure your application to produce messages to multiple\nclusters effortlessly. Let\u2019s explore how you can achieve this:"),(0,t.kt)("p",null,"Consider the following code snippet that demonstrates producing messages\nto multiple clusters:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals(msg: TestMsg):\n print(f"{msg=}")\n await to_predictions_1(TestMsg(msg="prediction"))\n await to_predictions_2(TestMsg(msg="prediction"))\n\n\n@app.produces(topic="predictions")\nasync def to_predictions_1(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction to s1: {prediction}")\n return [prediction]\n\n\n@app.produces(topic="predictions", brokers=kafka_brokers_2)\nasync def to_predictions_2(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction to s2: {prediction}")\n return [prediction]\n')),(0,t.kt)("p",null,"Here\u2019s what you need to know about producing to multiple clusters:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We define two Kafka broker configurations: ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2"),", representing different clusters with their\nrespective 
connection details.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We create an instance of the FastKafka application, specifying\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," as the primary cluster for producing messages.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"The ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals"),' function serves as a consumer,\nhandling incoming messages from the \u201cpreprocessed_signals" topic.\nWithin this function, we invoke two producer functions:\n',(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1")," and ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2"),".")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"The ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1"),' function sends predictions to the\n\u201cpredictions" topic on ',(0,t.kt)("em",{parentName:"p"},"kafka_brokers_1")," cluster.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Additionally, the ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2"),' function sends the same\npredictions to the \u201cpredictions" topic on ',(0,t.kt)("em",{parentName:"p"},"kafka_brokers_2")," cluster.\nThis allows for producing the same data to multiple clusters\nsimultaneously."))),(0,t.kt)("p",null,"By utilizing this approach, you can seamlessly produce messages to\nmultiple Kafka clusters, enabling you to distribute data across\ndifferent environments or leverage the strengths of various clusters."),(0,t.kt)("p",null,"Feel free to customize the producer functions as per your requirements,\nperforming any necessary data transformations or enrichment before\nsending the predictions."),(0,t.kt)("p",null,"With FastKafka, producing to multiple clusters becomes a breeze,\nempowering you to harness the capabilities of multiple environments\neffortlessly."),(0,t.kt)("h4",{id:"testing-2"},"Testing"),(0,t.kt)("p",null,"Let\u2019s take a look at the testing code 
snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.to_preprocessed_signals(TestMsg(msg="signal"))\n await tester.mirrors[to_predictions_1].assert_called(timeout=5)\n await tester.mirrors[to_predictions_2].assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-05-30 10:34:00.033 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n23-05-30 10:34:00.034 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-05-30 10:34:00.035 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-05-30 10:34:00.036 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:34:00.037 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:34:00.038 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-05-30 10:34:00.038 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:34:00.052 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-05-30 10:34:00.053 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-05-30 10:34:00.054 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:34:00.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:34:00.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer 
patched start() called()\n23-05-30 10:34:00.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:34:00.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:34:00.057 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:34:00.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-05-30 10:34:00.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:34:00.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:34:00.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:34:00.062 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:34:00.062 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-05-30 10:34:00.064 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-05-30 10:34:00.064 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-05-30 10:34:00.065 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nmsg=TestMsg(msg='signal')\nSending prediction to s1: msg='prediction'\nSending prediction to s2: msg='prediction'\n23-05-30 10:34:04.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:34:04.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:34:04.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:34:04.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-05-30 10:34:04.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-05-30 10:34:04.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-30 10:34:04.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() 
called\n23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"Here\u2019s how you can perform the necessary tests:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Within an async with block, create an instance of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," by\npassing in your app object, representing your FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the ",(0,t.kt)("inlineCode",{parentName:"p"},"tester.to_preprocessed_signals"),' method, you can send a\nTestMsg message with the content \u201csignal".')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the message, you can perform assertions on the\nto_predictions_1 and to_predictions_2 functions using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[to_predictions_1].assert_called(timeout=5)")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[to_predictions_2].assert_called(timeout=5)"),". These\nassertions ensure that the respective producer functions have\nproduced data to their respective topic/broker combinations."))),(0,t.kt)("p",null,"By employing this testing approach, you can verify that the producing\nfunctions correctly send messages to their respective clusters. 
The\ntesting framework provided by FastKafka enables you to ensure the\naccuracy and reliability of your application\u2019s producing logic."))}l.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/9fda8563.2e6d9fba.js b/assets/js/9fda8563.2e6d9fba.js new file mode 100644 index 0000000..8b14cd0 --- /dev/null +++ b/assets/js/9fda8563.2e6d9fba.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7055],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var c=r.createContext({}),l=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=l(e.components);return r.createElement(c.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var 
n=e.components,a=e.mdxType,o=e.originalType,c=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),p=l(n),k=a,m=p["".concat(c,".").concat(k)]||p[k]||f[k]||o;return n?r.createElement(m,i(i({ref:t},u),{},{components:n})):r.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,i=new Array(o);i[0]=k;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[p]="string"==typeof e?e:a,i[1]=s;for(var l=2;l<o;l++)i[l]=n[l];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}k.displayName="MDXCreateElement"},7279:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var r=n(7462),a=(n(7294),n(3905));const o={},i=void 0,s={unversionedId:"api/fastkafka/executors/DynamicTaskExecutor",id:"version-0.6.0/api/fastkafka/executors/DynamicTaskExecutor",title:"DynamicTaskExecutor",description:"fastkafka.executors.DynamicTaskExecutor {fastkafka.executors.DynamicTaskExecutor}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/executors/DynamicTaskExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/DynamicTaskExecutor",permalink:"/docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor",draft:!1,tags:[],version:"0.6.0",frontMatter:{}},c={},l=[{value:"<code>fastkafka.executors.DynamicTaskExecutor</code>",id:"fastkafka.executors.DynamicTaskExecutor",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>run</code>",id:"run",level:3}],u={toc:l},p="wrapper";function f(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.executors.DynamicTaskExecutor"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.executors.DynamicTaskExecutor")),(0,a.kt)("p",null,"A class that implements a dynamic task executor for processing consumer records."),(0,a.kt)("p",null,"The DynamicTaskExecutor 
class extends the StreamExecutor class and provides functionality\nfor running a tasks in parallel using asyncio.Task."),(0,a.kt)("h3",{id:"init"},(0,a.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000, size: int = 100000) -> None")),(0,a.kt)("p",null,"Create an instance of DynamicTaskExecutor"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"throw_exceptions"),": Flag indicating whether exceptions should be thrown ot logged.\nDefaults to False."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"max_buffer_size"),": Maximum buffer size for the memory object stream.\nDefaults to 100_000."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"size"),": Size of the task pool. Defaults to 100_000.")),(0,a.kt)("h3",{id:"run"},(0,a.kt)("inlineCode",{parentName:"h3"},"run")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None")),(0,a.kt)("p",null,"Runs the dynamic task executor."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"is_shutting_down_f"),": Function to check if the executor is shutting down."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"generator"),": Generator function for retrieving consumer records."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"processor"),": Processor function for processing consumer 
records.")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a03cde8f.cdce6d67.js b/assets/js/a03cde8f.cdce6d67.js new file mode 100644 index 0000000..06cafb7 --- /dev/null +++ b/assets/js/a03cde8f.cdce6d67.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4789],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?a(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),u=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(l.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var 
n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(n),k=o,d=p["".concat(l,".").concat(k)]||p[k]||f[k]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var u=2;u<a;u++)i[u]=n[u];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}k.displayName="MDXCreateElement"},7255:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const a={},i="Intro",s={unversionedId:"guides/Guide_01_Intro",id:"version-0.5.0/guides/Guide_01_Intro",title:"Intro",description:"This tutorial will show you how to use FastKafkaAPI, step by",source:"@site/versioned_docs/version-0.5.0/guides/Guide_01_Intro.md",sourceDirName:"guides",slug:"/guides/Guide_01_Intro",permalink:"/docs/0.5.0/guides/Guide_01_Intro",draft:!1,tags:[],version:"0.5.0",frontMatter:{}},l={},u=[{value:"Installing FastKafkaAPI",id:"installing-fastkafkaapi",level:2},{value:"Preparing a Kafka broker",id:"preparing-a-kafka-broker",level:2},{value:"Running the code",id:"running-the-code",level:2}],c={toc:u},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"intro"},"Intro"),(0,o.kt)("p",null,"This tutorial will show you how to use ",(0,o.kt)("b",null,"FastKafkaAPI"),", step by\nstep."),(0,o.kt)("p",null,"The goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel."),(0,o.kt)("p",null,"In this Intro tutorial we\u2019ll go trough the basic requirements to run the\ndemos presented in future 
steps."),(0,o.kt)("h2",{id:"installing-fastkafkaapi"},"Installing FastKafkaAPI"),(0,o.kt)("p",null,"First step is to install FastKafkaAPI"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ pip install fastkafka\n")),(0,o.kt)("h2",{id:"preparing-a-kafka-broker"},"Preparing a Kafka broker"),(0,o.kt)("p",null,"Next step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication."),(0,o.kt)("p",null,'!!! info "Hey, your first info!"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n")),(0,o.kt)("p",null,"To go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times)."),(0,o.kt)("p",null,'!!! warning "Listen! This is important."'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'To be able to setup this configuration you need to have Docker and docker-compose installed\n\nSee here for more info on <a href = \\"https://docs.docker.com/\\" target=\\"_blank\\">Docker</a> and <a href = \\"https://docs.docker.com/compose/install/\\" target=\\"_blank\\">docker compose</a>\n')),(0,o.kt)("p",null,"To setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g.\xa0fastkafka_demo). 
Inside the new\nfolder create a new YAML file named ",(0,o.kt)("b",null,"kafka_demo.yml")," and copy the\nfollowing configuration into it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'version: "3"\nservices:\n zookeeper:\n image: wurstmeister/zookeeper\n hostname: zookeeper\n container_name: zookeeper\n networks:\n - fastkafka-network\n ports:\n - "2181:2181"\n - "22:22"\n - "2888:2888"\n - "3888:3888"\n kafka:\n image: wurstmeister/kafka\n container_name: kafka\n ports:\n - "9093:9093"\n environment:\n HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d\' \' -f 2"\n KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"\n KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n KAFKA_CREATE_TOPICS: "hello:1:1"\n volumes:\n - /var/run/docker.sock:/var/run/docker.sock\n depends_on:\n - zookeeper\n healthcheck:\n test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]\n interval: 5s\n timeout: 10s\n retries: 5\n networks:\n - fastkafka-network\nnetworks:\n fastkafka-network:\n name: "fastkafka-network"\n')),(0,o.kt)("p",null,"This configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a \u2018hello\u2019 topic (quite enough for a\nstart). To start the configuration, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ docker-compose -f kafka_demo.yaml up -d --wait\n")),(0,o.kt)("p",null,"This will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! 
\ud83c\udf8a"),(0,o.kt)("h2",{id:"running-the-code"},"Running the code"),(0,o.kt)("p",null,"After installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the \u2018First Steps\u2019 part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem."),(0,o.kt)("p",null,"You are highly encouraged to follow along the tutorials not just by\nreading trough them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a07fb1cb.ef0f7575.js b/assets/js/a07fb1cb.ef0f7575.js new file mode 100644 index 0000000..2dc366d --- /dev/null +++ b/assets/js/a07fb1cb.ef0f7575.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5430],{3905:(e,t,a)=>{a.d(t,{Zo:()=>l,kt:()=>k});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function c(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var 
o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):c(c({},t),e)),a},l=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,l=i(e,["components","mdxType","originalType","parentName"]),d=s(a),u=r,k=d["".concat(p,".").concat(u)]||d[u]||f[u]||o;return a?n.createElement(k,c(c({ref:t},l),{},{components:a})):n.createElement(k,c({ref:t},l))}));function k(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,c=new Array(o);c[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[d]="string"==typeof e?e:r,c[1]=i;for(var s=2;s<o;s++)c[s]=a[s];return n.createElement.apply(null,c)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},8510:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const o={},c=void 0,i={unversionedId:"api/fastkafka/encoder/avsc_to_pydantic",id:"version-0.6.0/api/fastkafka/encoder/avsc_to_pydantic",title:"avsc_to_pydantic",description:"fastkafka.encoder.avsctopydantic 
{fastkafka.encoder.avsctopydantic}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/avsc_to_pydantic.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avsc_to_pydantic",permalink:"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_encoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/avro_encoder"},next:{title:"json_decoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/json_decoder"}},p={},s=[{value:"<code>fastkafka.encoder.avsc_to_pydantic</code>",id:"fastkafka.encoder.avsc_to_pydantic",level:2},{value:"<code>avsc_to_pydantic</code>",id:"avsc_to_pydantic",level:3}],l={toc:s},d="wrapper";function f(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},l,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.encoder.avsc_to_pydantic"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avsc_to_pydantic")),(0,r.kt)("h3",{id:"avsc_to_pydantic"},(0,r.kt)("inlineCode",{parentName:"h3"},"avsc_to_pydantic")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass")),(0,r.kt)("p",null,"Generate pydantic model from given Avro Schema"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"schema"),": Avro schema in dictionary format")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Pydantic model class built from given avro schema")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a17dbf83.57177a4c.js b/assets/js/a17dbf83.57177a4c.js new file mode 100644 index 0000000..03d3e3d --- /dev/null +++ b/assets/js/a17dbf83.57177a4c.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[162],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var c=r.createContext({}),l=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=l(e.components);return r.createElement(c.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,c=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),p=l(n),k=a,m=p["".concat(c,".").concat(k)]||p[k]||f[k]||o;return n?r.createElement(m,i(i({ref:t},u),{},{components:n})):r.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,i=new Array(o);i[0]=k;var s={};for(var c in 
t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[p]="string"==typeof e?e:a,i[1]=s;for(var l=2;l<o;l++)i[l]=n[l];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}k.displayName="MDXCreateElement"},9044:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var r=n(7462),a=(n(7294),n(3905));const o={},i=void 0,s={unversionedId:"api/fastkafka/executors/DynamicTaskExecutor",id:"version-0.7.0/api/fastkafka/executors/DynamicTaskExecutor",title:"DynamicTaskExecutor",description:"fastkafka.executors.DynamicTaskExecutor {fastkafka.executors.DynamicTaskExecutor}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/executors/DynamicTaskExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/DynamicTaskExecutor",permalink:"/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_encoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/json_encoder"},next:{title:"SequentialExecutor",permalink:"/docs/0.7.0/api/fastkafka/executors/SequentialExecutor"}},c={},l=[{value:"<code>fastkafka.executors.DynamicTaskExecutor</code>",id:"fastkafka.executors.DynamicTaskExecutor",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>run</code>",id:"run",level:3}],u={toc:l},p="wrapper";function f(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.executors.DynamicTaskExecutor"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.executors.DynamicTaskExecutor")),(0,a.kt)("p",null,"A class that implements a dynamic task executor for processing consumer records."),(0,a.kt)("p",null,"The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality\nfor running a tasks in parallel using 
asyncio.Task."),(0,a.kt)("h3",{id:"init"},(0,a.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000, size: int = 100000) -> None")),(0,a.kt)("p",null,"Create an instance of DynamicTaskExecutor"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"throw_exceptions"),": Flag indicating whether exceptions should be thrown ot logged.\nDefaults to False."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"max_buffer_size"),": Maximum buffer size for the memory object stream.\nDefaults to 100_000."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"size"),": Size of the task pool. Defaults to 100_000.")),(0,a.kt)("h3",{id:"run"},(0,a.kt)("inlineCode",{parentName:"h3"},"run")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None")),(0,a.kt)("p",null,"Runs the dynamic task executor."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"is_shutting_down_f"),": Function to check if the executor is shutting down."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"generator"),": Generator function for retrieving consumer records."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"processor"),": Processor function for processing consumer records.")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of 
file diff --git a/assets/js/a34ed3b2.a8b3a7f3.js b/assets/js/a34ed3b2.a8b3a7f3.js new file mode 100644 index 0000000..a6efe22 --- /dev/null +++ b/assets/js/a34ed3b2.a8b3a7f3.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2777],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>u});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=s(a),m=i,u=d["".concat(p,".").concat(m)]||d[m]||k[m]||r;return 
a?n.createElement(u,o(o({ref:t},c),{},{components:a})):n.createElement(u,o({ref:t},c))}));function u(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=a.length,o=new Array(r);o[0]=m;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[d]="string"==typeof e?e:i,o[1]=l;for(var s=2;s<r;s++)o[s]=a[s];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},3749:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>s});var n=a(7462),i=(a(7294),a(3905));const r={},o="Deploying FastKafka using Docker",l={unversionedId:"guides/Guide_30_Using_docker_to_deploy_fastkafka",id:"version-0.8.0/guides/Guide_30_Using_docker_to_deploy_fastkafka",title:"Deploying FastKafka using Docker",description:"Building a Docker Image",source:"@site/versioned_docs/version-0.8.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_30_Using_docker_to_deploy_fastkafka",permalink:"/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/guides/Guide_04_Github_Actions_Workflow"},next:{title:"Using FastAPI to Run FastKafka Application",permalink:"/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application"}},p={},s=[{value:"Building a Docker Image",id:"building-a-docker-image",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Creating requirements.txt file",id:"creating-requirementstxt-file",level:3},{value:"Creating Dockerfile",id:"creating-dockerfile",level:3},{value:"Build the Docker Image",id:"build-the-docker-image",level:3},{value:"Start the Docker Container",id:"start-the-docker-container",level:3},{value:"Additional Security",id:"additional-security",level:2},{value:"Example 
repo",id:"example-repo",level:2}],c={toc:s},d="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"deploying-fastkafka-using-docker"},"Deploying FastKafka using Docker"),(0,i.kt)("h2",{id:"building-a-docker-image"},"Building a Docker Image"),(0,i.kt)("p",null,"To build a Docker image for a FastKafka project, we need the following\nitems:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library that is built using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"A file in which the requirements are specified. This could be a\nrequirements.txt file, a setup.py file, or even a wheel file."),(0,i.kt)("li",{parentName:"ol"},"A Dockerfile to build an image that will include the two files\nmentioned above.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., 
example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"creating-requirementstxt-file"},"Creating requirements.txt file"),(0,i.kt)("p",null,"The above code only requires FastKafka. 
So, we will add only that to the\n",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file, but you can add additional requirements to it\nas well."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"fastkafka>=0.3.0\n")),(0,i.kt)("p",null,"Here we are using ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," to store the project\u2019s\ndependencies. However, other methods like ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv"),", and\n",(0,i.kt)("inlineCode",{parentName:"p"},"wheel")," files can also be used. ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py")," is commonly used for\npackaging and distributing Python modules, while ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv")," is a tool used\nfor managing virtual environments and package dependencies. ",(0,i.kt)("inlineCode",{parentName:"p"},"wheel"),"\nfiles are built distributions of Python packages that can be installed\nwith pip."),(0,i.kt)("h3",{id:"creating-dockerfile"},"Creating Dockerfile"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-dockerfile"},'# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]\n')),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Start from the official Python base image.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the current working directory to ",(0,i.kt)("inlineCode",{parentName:"p"},"/project"),"."),(0,i.kt)("p",{parentName:"li"},"This is where we\u2019ll put the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file and the\n",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," 
file.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Copy the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file inside\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install the package dependencies in the requirements file."),(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"--no-cache-dir")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to not save the downloaded\npackages locally, as that is only if ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," was going to be run again\nto install the same packages, but that\u2019s not the case when working\nwith containers."),(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"--upgrade")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to upgrade the packages if they\nare already installed.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the ",(0,i.kt)("strong",{parentName:"p"},"command")," to run the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"p"},"CMD")," takes a list of strings, each of these strings is what you\nwould type in the command line separated by spaces."),(0,i.kt)("p",{parentName:"li"},"This command will be run from the ",(0,i.kt)("strong",{parentName:"p"},"current working directory"),", the\nsame ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory you set above with ",(0,i.kt)("inlineCode",{parentName:"p"},"WORKDIR /project"),"."),(0,i.kt)("p",{parentName:"li"},"We supply additional parameters ",(0,i.kt)("inlineCode",{parentName:"p"},"--num-workers")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"--kafka-broker"),"\nfor the run command. 
Finally, we specify the location of our\nFastKafka application as a command argument."),(0,i.kt)("p",{parentName:"li"},"To learn more about ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command please check the ",(0,i.kt)("a",{parentName:"p",href:"../../cli/fastkafka/#fastkafka-run"},"CLI\ndocs"),"."))),(0,i.kt)("h3",{id:"build-the-docker-image"},"Build the Docker Image"),(0,i.kt)("p",null,"Now that all the files are in place, let\u2019s build the container image."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Go to the project directory (where your ",(0,i.kt)("inlineCode",{parentName:"p"},"Dockerfile")," is, containing\nyour ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file).")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to build the image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker build -t fastkafka_project_image .\n")),(0,i.kt)("p",{parentName:"li"},"This command will create a docker image with the name\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and the ",(0,i.kt)("inlineCode",{parentName:"p"},"latest")," tag."))),(0,i.kt)("p",null,"That\u2019s it! You have now built a docker image for your FastKafka project."),(0,i.kt)("h3",{id:"start-the-docker-container"},"Start the Docker Container"),(0,i.kt)("p",null,"Run a container based on the built image:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker run -d --name fastkafka_project_container fastkafka_project_image\n")),(0,i.kt)("h2",{id:"additional-security"},"Additional Security"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Trivy")," is an open-source tool that scans Docker images for\nvulnerabilities. It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. 
Here\u2019s\nhow you can use ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image"),":"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," on your local machine by following the instructions\nprovided in the ",(0,i.kt)("a",{parentName:"p",href:"https://aquasecurity.github.io/trivy/latest/getting-started/installation/"},"official ",(0,i.kt)("inlineCode",{parentName:"a"},"trivy"),"\ndocumentation"),".")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to scan your fastkafka_project_image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"trivy image fastkafka_project_image\n")),(0,i.kt)("p",{parentName:"li"},"This command will scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," for any\nvulnerabilities and provide you with a report of its findings.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Fix any vulnerabilities identified by ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy"),". 
You can do this by\nupdating the vulnerable package to a more secure version or by using\na different package altogether.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Rebuild your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and repeat steps 2 and 3\nuntil ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," reports no vulnerabilities."))),(0,i.kt)("p",null,"By using ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nbased library which uses above mentioned Dockerfile to build a docker\nimage can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/"},"here")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a4055066.ba2a7e48.js b/assets/js/a4055066.ba2a7e48.js new file mode 100644 index 0000000..dbe3120 --- /dev/null +++ b/assets/js/a4055066.ba2a7e48.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4018],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>d});var r=a(7294);function n(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,r)}return a}function i(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){n(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,r,n=function(e,t){if(null==e)return{};var a,r,n={},o=Object.keys(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||(n[a]=e[a]);return n}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)a=o[r],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(n[a]=e[a])}return n}var s=r.createContext({}),p=function(e){var t=r.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},k=function(e){var t=p(e.components);return r.createElement(s.Provider,{value:t},e.children)},c="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var a=e.components,n=e.mdxType,o=e.originalType,s=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(a),u=n,d=c["".concat(s,".").concat(u)]||c[u]||f[u]||o;return a?r.createElement(d,i(i({ref:t},k),{},{components:a})):r.createElement(d,i({ref:t},k))}));function d(e,t){var a=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var o=a.length,i=new Array(o);i[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:n,i[1]=l;for(var p=2;p<o;p++)i[p]=a[p];return r.createElement.apply(null,i)}return r.createElement.apply(null,a)}u.displayName="MDXCreateElement"},9328:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var r=a(7462),n=(a(7294),a(3905));const o={},i=void 
0,l={unversionedId:"api/fastkafka/testing/ApacheKafkaBroker",id:"version-0.7.1/api/fastkafka/testing/ApacheKafkaBroker",title:"ApacheKafkaBroker",description:"fastkafka.testing.ApacheKafkaBroker {fastkafka.testing.ApacheKafkaBroker}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/testing/ApacheKafkaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/ApacheKafkaBroker",permalink:"/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"SequentialExecutor",permalink:"/docs/0.7.1/api/fastkafka/executors/SequentialExecutor"},next:{title:"LocalRedpandaBroker",permalink:"/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker"}},s={},p=[{value:"<code>fastkafka.testing.ApacheKafkaBroker</code>",id:"fastkafka.testing.ApacheKafkaBroker",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>get_service_config_string</code>",id:"get_service_config_string",level:3},{value:"<code>start</code>",id:"start",level:3},{value:"<code>stop</code>",id:"stop",level:3}],k={toc:p},c="wrapper";function f(e){let{components:t,...a}=e;return(0,n.kt)(c,(0,r.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.testing.ApacheKafkaBroker"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.ApacheKafkaBroker")),(0,n.kt)("p",null,"ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing."),(0,n.kt)("h3",{id:"init"},(0,n.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None")),(0,n.kt)("p",null,"Initialises the ApacheKafkaBroker 
object"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to connect"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,n.kt)("h3",{id:"get_service_config_string"},(0,n.kt)("inlineCode",{parentName:"h3"},"get_service_config_string")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def get_service_config_string(self: fastkafka.testing.ApacheKafkaBroker, service: str, data_dir: pathlib.Path) -> str")),(0,n.kt)("p",null,"Gets the configuration string for a service."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"service"),': Name of the service ("kafka" or "zookeeper").'),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the service will save data.")),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"The service configuration string.")),(0,n.kt)("h3",{id:"start"},(0,n.kt)("inlineCode",{parentName:"h3"},"start")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def start(self: fastkafka.testing.ApacheKafkaBroker) -> str")),(0,n.kt)("p",null,"Starts a local Kafka broker and ZooKeeper instance synchronously."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"The Kafka broker bootstrap server address in string format: 
host:port.")),(0,n.kt)("h3",{id:"stop"},(0,n.kt)("inlineCode",{parentName:"h3"},"stop")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None")),(0,n.kt)("p",null,"Stops a local kafka broker and zookeeper instance synchronously"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a4cbee7f.d067a9ae.js b/assets/js/a4cbee7f.d067a9ae.js new file mode 100644 index 0000000..1b058bb --- /dev/null +++ b/assets/js/a4cbee7f.d067a9ae.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5144],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>u});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof 
e?e(t):o(o({},t),e)),a},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=s(a),m=i,u=d["".concat(p,".").concat(m)]||d[m]||k[m]||r;return a?n.createElement(u,o(o({ref:t},c),{},{components:a})):n.createElement(u,o({ref:t},c))}));function u(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=a.length,o=new Array(r);o[0]=m;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[d]="string"==typeof e?e:i,o[1]=l;for(var s=2;s<r;s++)o[s]=a[s];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},501:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>s});var n=a(7462),i=(a(7294),a(3905));const r={},o="Deploying FastKafka using Docker",l={unversionedId:"guides/Guide_30_Using_docker_to_deploy_fastkafka",id:"version-0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka",title:"Deploying FastKafka using Docker",description:"Building a Docker Image",source:"@site/versioned_docs/version-0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_30_Using_docker_to_deploy_fastkafka",permalink:"/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow"},next:{title:"Benchmarking FastKafka app",permalink:"/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka"}},p={},s=[{value:"Building a Docker Image",id:"building-a-docker-image",level:2},{value:"Creating FastKafka 
Code",id:"creating-fastkafka-code",level:3},{value:"Creating requirements.txt file",id:"creating-requirementstxt-file",level:3},{value:"Creating Dockerfile",id:"creating-dockerfile",level:3},{value:"Build the Docker Image",id:"build-the-docker-image",level:3},{value:"Start the Docker Container",id:"start-the-docker-container",level:3},{value:"Additional Security",id:"additional-security",level:2},{value:"Example repo",id:"example-repo",level:2}],c={toc:s},d="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"deploying-fastkafka-using-docker"},"Deploying FastKafka using Docker"),(0,i.kt)("h2",{id:"building-a-docker-image"},"Building a Docker Image"),(0,i.kt)("p",null,"To build a Docker image for a FastKafka project, we need the following\nitems:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library that is built using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"A file in which the requirements are specified. 
This could be a\nrequirements.txt file, a setup.py file, or even a wheel file."),(0,i.kt)("li",{parentName:"ol"},"A Dockerfile to build an image that will include the two files\nmentioned above.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka 
broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"creating-requirementstxt-file"},"Creating requirements.txt file"),(0,i.kt)("p",null,"The above code only requires ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka"),". So, we will add only\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka")," to the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file, but you can add additional\nrequirements to it as well."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"fastkafka>=0.3.0\n")),(0,i.kt)("p",null,"Here we are using ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," to store the project\u2019s\ndependencies. However, other methods like ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv"),", and\n",(0,i.kt)("inlineCode",{parentName:"p"},"wheel")," files can also be used. ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py")," is commonly used for\npackaging and distributing Python modules, while ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv")," is a tool used\nfor managing virtual environments and package dependencies. 
",(0,i.kt)("inlineCode",{parentName:"p"},"wheel"),"\nfiles are built distributions of Python packages that can be installed\nwith pip."),(0,i.kt)("h3",{id:"creating-dockerfile"},"Creating Dockerfile"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-dockerfile"},'# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]\n')),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Start from the official Python base image.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the current working directory to ",(0,i.kt)("inlineCode",{parentName:"p"},"/project"),"."),(0,i.kt)("p",{parentName:"li"},"This is where we\u2019ll put the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file and the\n",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Copy the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file inside\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install the package dependencies in the requirements file."),(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"--no-cache-dir")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to not save the downloaded\npackages locally, as that is only if ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," was going to be run again\nto install the same packages, but that\u2019s not the case when working\nwith containers."),(0,i.kt)("p",{parentName:"li"},"The 
",(0,i.kt)("inlineCode",{parentName:"p"},"--upgrade")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to upgrade the packages if they\nare already installed.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the ",(0,i.kt)("strong",{parentName:"p"},"command")," to run the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"p"},"CMD")," takes a list of strings, each of these strings is what you\nwould type in the command line separated by spaces."),(0,i.kt)("p",{parentName:"li"},"This command will be run from the ",(0,i.kt)("strong",{parentName:"p"},"current working directory"),", the\nsame ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory you set above with ",(0,i.kt)("inlineCode",{parentName:"p"},"WORKDIR /project"),"."),(0,i.kt)("p",{parentName:"li"},"We supply additional parameters ",(0,i.kt)("inlineCode",{parentName:"p"},"--num-workers")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"--kafka-broker"),"\nfor the run command. 
Finally, we specify the location of our\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka")," application location as a command argument."),(0,i.kt)("p",{parentName:"li"},"To learn more about ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command please check the ",(0,i.kt)("a",{parentName:"p",href:"../../cli/fastkafka/#fastkafka-run"},"CLI\ndocs"),"."))),(0,i.kt)("h3",{id:"build-the-docker-image"},"Build the Docker Image"),(0,i.kt)("p",null,"Now that all the files are in place, let\u2019s build the container image."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Go to the project directory (where your ",(0,i.kt)("inlineCode",{parentName:"p"},"Dockerfile")," is, containing\nyour ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file).")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to build the image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker build -t fastkafka_project_image .\n")),(0,i.kt)("p",{parentName:"li"},"This command will create a docker image with the name\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and the ",(0,i.kt)("inlineCode",{parentName:"p"},"latest")," tag."))),(0,i.kt)("p",null,"That\u2019s it! You have now built a docker image for your FastKafka project."),(0,i.kt)("h3",{id:"start-the-docker-container"},"Start the Docker Container"),(0,i.kt)("p",null,"Run a container based on the built image:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker run -d --name fastkafka_project_container fastkafka_project_image\n")),(0,i.kt)("h2",{id:"additional-security"},"Additional Security"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Trivy")," is an open-source tool that scans Docker images for\nvulnerabilities. 
It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. Here\u2019s\nhow you can use ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image"),":"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," on your local machine by following the instructions\nprovided in the ",(0,i.kt)("a",{parentName:"p",href:"https://aquasecurity.github.io/trivy/latest/getting-started/installation/"},"official ",(0,i.kt)("inlineCode",{parentName:"a"},"trivy"),"\ndocumentation"),".")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to scan your fastkafka_project_image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"trivy image fastkafka_project_image\n")),(0,i.kt)("p",{parentName:"li"},"This command will scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," for any\nvulnerabilities and provide you with a report of its findings.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Fix any vulnerabilities identified by ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy"),". 
You can do this by\nupdating the vulnerable package to a more secure version or by using\na different package altogether.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Rebuild your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and repeat steps 2 and 3\nuntil ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," reports no vulnerabilities."))),(0,i.kt)("p",null,"By using ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nbased library which uses above mentioned Dockerfile to build a docker\nimage can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/"},"here")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a5b090b0.a4364faf.js b/assets/js/a5b090b0.a4364faf.js new file mode 100644 index 0000000..c1ade9d --- /dev/null +++ b/assets/js/a5b090b0.a4364faf.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4029],{3905:(e,t,a)=>{a.d(t,{Zo:()=>s,kt:()=>m});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function c(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var l=n.createContext({}),p=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):c(c({},t),e)),a},s=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,l=e.parentName,s=i(e,["components","mdxType","originalType","parentName"]),d=p(a),u=r,m=d["".concat(l,".").concat(u)]||d[u]||f[u]||o;return a?n.createElement(m,c(c({ref:t},s),{},{components:a})):n.createElement(m,c({ref:t},s))}));function m(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,c=new Array(o);c[0]=u;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,c[1]=i;for(var p=2;p<o;p++)c[p]=a[p];return n.createElement.apply(null,c)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},8298:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},c=void 
0,i={unversionedId:"api/fastkafka/encoder/avsc_to_pydantic",id:"version-0.8.0/api/fastkafka/encoder/avsc_to_pydantic",title:"avsc_to_pydantic",description:"avsctopydantic {fastkafka.encoder.avsctopydantic}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/avsc_to_pydantic.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avsc_to_pydantic",permalink:"/docs/api/fastkafka/encoder/avsc_to_pydantic",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_encoder",permalink:"/docs/api/fastkafka/encoder/avro_encoder"},next:{title:"json_decoder",permalink:"/docs/api/fastkafka/encoder/json_decoder"}},l={},p=[{value:"avsc_to_pydantic",id:"fastkafka.encoder.avsc_to_pydantic",level:3}],s={toc:p},d="wrapper";function f(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h3",{id:"fastkafka.encoder.avsc_to_pydantic"},"avsc_to_pydantic"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L283-L403",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"avsc_to_pydantic(\n schema\n)\n")),(0,r.kt)("p",null,"Generate pydantic model from given Avro Schema"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"schema")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Dict[str, 
Any]")),(0,r.kt)("td",{parentName:"tr",align:null},"Avro schema in dictionary format"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Type[pydantic.main.BaseModel]")),(0,r.kt)("td",{parentName:"tr",align:null},"Pydantic model class built from given avro schema")))))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a624bde7.bd5c926b.js b/assets/js/a624bde7.bd5c926b.js new file mode 100644 index 0000000..aa7c924 --- /dev/null +++ b/assets/js/a624bde7.bd5c926b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8823],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?a(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||(o[n]=e[n]);return 
o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),u=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(l.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(n),k=o,d=p["".concat(l,".").concat(k)]||p[k]||f[k]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var u=2;u<a;u++)i[u]=n[u];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}k.displayName="MDXCreateElement"},1882:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const a={},i="Intro",s={unversionedId:"guides/Guide_01_Intro",id:"version-0.8.0/guides/Guide_01_Intro",title:"Intro",description:"This tutorial will show you how to use FastKafkaAPI, step by",source:"@site/versioned_docs/version-0.8.0/guides/Guide_01_Intro.md",sourceDirName:"guides",slug:"/guides/Guide_01_Intro",permalink:"/docs/guides/Guide_01_Intro",draft:!1,tags:[],version:"0.8.0",frontMatter:{}},l={},u=[{value:"Installing FastKafkaAPI",id:"installing-fastkafkaapi",level:2},{value:"Preparing a Kafka broker",id:"preparing-a-kafka-broker",level:2},{value:"Running the code",id:"running-the-code",level:2}],c={toc:u},p="wrapper";function 
f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"intro"},"Intro"),(0,o.kt)("p",null,"This tutorial will show you how to use ",(0,o.kt)("b",null,"FastKafkaAPI"),", step by\nstep."),(0,o.kt)("p",null,"The goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel."),(0,o.kt)("p",null,"In this Intro tutorial we\u2019ll go trough the basic requirements to run the\ndemos presented in future steps."),(0,o.kt)("h2",{id:"installing-fastkafkaapi"},"Installing FastKafkaAPI"),(0,o.kt)("p",null,"First step is to install FastKafkaAPI"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ pip install fastkafka\n")),(0,o.kt)("h2",{id:"preparing-a-kafka-broker"},"Preparing a Kafka broker"),(0,o.kt)("p",null,"Next step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication."),(0,o.kt)("p",null,'!!! info "Hey, your first info!"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n")),(0,o.kt)("p",null,"To go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times)."),(0,o.kt)("p",null,'!!! warning "Listen! This is important."'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'To be able to setup this configuration you need to have Docker and docker-compose installed\n\nSee here for more info on <a href = \\"https://docs.docker.com/\\" target=\\"_blank\\">Docker</a> and <a href = \\"https://docs.docker.com/compose/install/\\" target=\\"_blank\\">docker compose</a>\n')),(0,o.kt)("p",null,"To setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g.\xa0fastkafka_demo). 
Inside the new\nfolder create a new YAML file named ",(0,o.kt)("b",null,"kafka_demo.yml")," and copy the\nfollowing configuration into it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'version: "3"\nservices:\n zookeeper:\n image: wurstmeister/zookeeper\n hostname: zookeeper\n container_name: zookeeper\n networks:\n - fastkafka-network\n ports:\n - "2181:2181"\n - "22:22"\n - "2888:2888"\n - "3888:3888"\n kafka:\n image: wurstmeister/kafka\n container_name: kafka\n ports:\n - "9093:9093"\n environment:\n HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d\' \' -f 2"\n KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"\n KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n KAFKA_CREATE_TOPICS: "hello:1:1"\n volumes:\n - /var/run/docker.sock:/var/run/docker.sock\n depends_on:\n - zookeeper\n healthcheck:\n test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]\n interval: 5s\n timeout: 10s\n retries: 5\n networks:\n - fastkafka-network\nnetworks:\n fastkafka-network:\n name: "fastkafka-network"\n')),(0,o.kt)("p",null,"This configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a \u2018hello\u2019 topic (quite enough for a\nstart). To start the configuration, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ docker-compose -f kafka_demo.yaml up -d --wait\n")),(0,o.kt)("p",null,"This will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! 
\ud83c\udf8a"),(0,o.kt)("h2",{id:"running-the-code"},"Running the code"),(0,o.kt)("p",null,"After installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the \u2018First Steps\u2019 part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem."),(0,o.kt)("p",null,"You are highly encouraged to follow along the tutorials not just by\nreading trough them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a686ca68.cc915e73.js b/assets/js/a686ca68.cc915e73.js new file mode 100644 index 0000000..8fecf9d --- /dev/null +++ b/assets/js/a686ca68.cc915e73.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7639],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?a(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),u=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(l.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(n),k=o,d=p["".concat(l,".").concat(k)]||p[k]||f[k]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var u=2;u<a;u++)i[u]=n[u];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}k.displayName="MDXCreateElement"},58:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const a={},i="Intro",s={unversionedId:"guides/Guide_01_Intro",id:"version-0.6.0/guides/Guide_01_Intro",title:"Intro",description:"This tutorial will show you how to use FastKafkaAPI, step by",source:"@site/versioned_docs/version-0.6.0/guides/Guide_01_Intro.md",sourceDirName:"guides",slug:"/guides/Guide_01_Intro",permalink:"/docs/0.6.0/guides/Guide_01_Intro",draft:!1,tags:[],version:"0.6.0",frontMatter:{}},l={},u=[{value:"Installing FastKafkaAPI",id:"installing-fastkafkaapi",level:2},{value:"Preparing a Kafka broker",id:"preparing-a-kafka-broker",level:2},{value:"Running the code",id:"running-the-code",level:2}],c={toc:u},p="wrapper";function 
f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"intro"},"Intro"),(0,o.kt)("p",null,"This tutorial will show you how to use ",(0,o.kt)("b",null,"FastKafkaAPI"),", step by\nstep."),(0,o.kt)("p",null,"The goal of FastKafkaAPI is to simplify the use of Apache Kafka in\nPython inspired by FastAPI look and feel."),(0,o.kt)("p",null,"In this Intro tutorial we\u2019ll go trough the basic requirements to run the\ndemos presented in future steps."),(0,o.kt)("h2",{id:"installing-fastkafkaapi"},"Installing FastKafkaAPI"),(0,o.kt)("p",null,"First step is to install FastKafkaAPI"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ pip install fastkafka\n")),(0,o.kt)("h2",{id:"preparing-a-kafka-broker"},"Preparing a Kafka broker"),(0,o.kt)("p",null,"Next step is to prepare the Kafka environment, our consumers and\nproducers will need some channel of communication."),(0,o.kt)("p",null,'!!! info "Hey, your first info!"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. \n")),(0,o.kt)("p",null,"To go through the tutorial, we recommend that you use dockerized Kafka\nbrokers, if you have Docker and docker-compose installed the setup\nshould take you no time (if we exclude the container download times)."),(0,o.kt)("p",null,'!!! warning "Listen! This is important."'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'To be able to setup this configuration you need to have Docker and docker-compose installed\n\nSee here for more info on <a href = \\"https://docs.docker.com/\\" target=\\"_blank\\">Docker</a> and <a href = \\"https://docs.docker.com/compose/install/\\" target=\\"_blank\\">docker compose</a>\n')),(0,o.kt)("p",null,"To setup the recommended environment, first, create a new folder wher\nyou want to save your demo files (e.g.\xa0fastkafka_demo). 
Inside the new\nfolder create a new YAML file named ",(0,o.kt)("b",null,"kafka_demo.yml")," and copy the\nfollowing configuration into it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-yaml"},'version: "3"\nservices:\n zookeeper:\n image: wurstmeister/zookeeper\n hostname: zookeeper\n container_name: zookeeper\n networks:\n - fastkafka-network\n ports:\n - "2181:2181"\n - "22:22"\n - "2888:2888"\n - "3888:3888"\n kafka:\n image: wurstmeister/kafka\n container_name: kafka\n ports:\n - "9093:9093"\n environment:\n HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d\' \' -f 2"\n KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"\n KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT\n KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093\n KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093\n KAFKA_INTER_BROKER_LISTENER_NAME: INTER\n KAFKA_CREATE_TOPICS: "hello:1:1"\n volumes:\n - /var/run/docker.sock:/var/run/docker.sock\n depends_on:\n - zookeeper\n healthcheck:\n test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]\n interval: 5s\n timeout: 10s\n retries: 5\n networks:\n - fastkafka-network\nnetworks:\n fastkafka-network:\n name: "fastkafka-network"\n')),(0,o.kt)("p",null,"This configuration will start a single instance of Zookeeper, single\ninstance of Kafka broker and create a \u2018hello\u2019 topic (quite enough for a\nstart). To start the configuration, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ docker-compose -f kafka_demo.yaml up -d --wait\n")),(0,o.kt)("p",null,"This will start the necessary containers and wait till they report that\nthey are Healthy. After the command finishes, you are good to go to try\nout the FastKafkaAPI capabilities! 
\ud83c\udf8a"),(0,o.kt)("h2",{id:"running-the-code"},"Running the code"),(0,o.kt)("p",null,"After installing FastKafkaAPI and initialising the Kafka broker you can\nproceed to the \u2018First Steps\u2019 part of the tutorial. There, you will write\nyour first Kafka client and producer apps, run them, and interact with\nthem."),(0,o.kt)("p",null,"You are highly encouraged to follow along the tutorials not just by\nreading trough them but by implementing the code examples in your own\nenvironment. This will not only help you remember the use cases better\nbut also, hopefully, demonstrate to you the ease of use of this library."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a6c229c0.135ade25.js b/assets/js/a6c229c0.135ade25.js new file mode 100644 index 0000000..6c331ae --- /dev/null +++ b/assets/js/a6c229c0.135ade25.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7773],{3905:(e,t,a)=>{a.d(t,{Zo:()=>m,kt:()=>N});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function o(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var 
i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var d=n.createContext({}),s=function(e){var t=n.useContext(d),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},m=function(e){var t=s(e.components);return n.createElement(d.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,i=e.originalType,d=e.parentName,m=o(e,["components","mdxType","originalType","parentName"]),p=s(a),u=r,N=p["".concat(d,".").concat(u)]||p[u]||k[u]||i;return a?n.createElement(N,l(l({ref:t},m),{},{components:a})):n.createElement(N,l({ref:t},m))}));function N(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var i=a.length,l=new Array(i);l[0]=u;var o={};for(var d in t)hasOwnProperty.call(t,d)&&(o[d]=t[d]);o.originalType=e,o[p]="string"==typeof e?e:r,l[1]=o;for(var s=2;s<i;s++)l[s]=a[s];return n.createElement.apply(null,l)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},4563:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>d,contentTitle:()=>l,default:()=>k,frontMatter:()=>i,metadata:()=>o,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const i={},l=void 0,o={unversionedId:"api/fastkafka/FastKafka",id:"version-0.8.0/api/fastkafka/FastKafka",title:"FastKafka",description:"init 
{fastkafka._application.app.FastKafka.init}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/FastKafka.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/",permalink:"/docs/api/fastkafka/",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"EventMetadata",permalink:"/docs/api/fastkafka/EventMetadata"},next:{title:"KafkaEvent",permalink:"/docs/api/fastkafka/KafkaEvent"}},d={},s=[{value:"<strong>init</strong>",id:"fastkafka._application.app.FastKafka.init",level:3},{value:"benchmark",id:"fastkafka._application.app.FastKafka.benchmark",level:3},{value:"consumes",id:"fastkafka._application.app.FastKafka.consumes",level:3},{value:"create_docs",id:"fastkafka._application.app.FastKafka.create_docs",level:3},{value:"create_mocks",id:"fastkafka._application.app.FastKafka.create_mocks",level:3},{value:"fastapi_lifespan",id:"fastkafka._application.app.FastKafka.fastapi_lifespan",level:3},{value:"get_topics",id:"fastkafka._application.app.FastKafka.get_topics",level:3},{value:"is_started",id:"fastkafka._application.app.FastKafka.is_started",level:3},{value:"produces",id:"fastkafka._application.app.FastKafka.produces",level:3},{value:"run_in_background",id:"fastkafka._application.app.FastKafka.run_in_background",level:3},{value:"set_kafka_broker",id:"fastkafka._application.app.FastKafka.set_kafka_broker",level:3}],m={toc:s},p="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L179-L305",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self,\n title=None,\n description=None,\n version=None,\n contact=None,\n kafka_brokers=None,\n root_path=None,\n lifespan=None,\n 
bootstrap_servers_id='localhost',\n loop=None,\n client_id=None,\n metadata_max_age_ms=300000,\n request_timeout_ms=40000,\n api_version='auto',\n acks=<object object at 0x7ff10d5f9100>,\n key_serializer=None,\n value_serializer=None,\n compression_type=None,\n max_batch_size=16384,\n partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>,\n max_request_size=1048576,\n linger_ms=0,\n send_backoff_ms=100,\n retry_backoff_ms=100,\n security_protocol='PLAINTEXT',\n ssl_context=None,\n connections_max_idle_ms=540000,\n enable_idempotence=False,\n transactional_id=None,\n transaction_timeout_ms=60000,\n sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n group_id=None,\n key_deserializer=None,\n value_deserializer=None,\n fetch_max_wait_ms=500,\n fetch_max_bytes=52428800,\n fetch_min_bytes=1,\n max_partition_fetch_bytes=1048576,\n auto_offset_reset='latest',\n enable_auto_commit=True,\n auto_commit_interval_ms=5000,\n check_crcs=True,\n partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),\n max_poll_interval_ms=300000,\n rebalance_timeout_ms=None,\n session_timeout_ms=10000,\n heartbeat_interval_ms=3000,\n consumer_timeout_ms=200,\n max_poll_records=None,\n exclude_internal_topics=True,\n isolation_level='read_uncommitted',\n)\n")),(0,r.kt)("p",null,"Creates FastKafka 
application"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"title")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"optional title for the documentation. If None,the title will be set to empty string"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"optional description for the documentation. IfNone, the description will be set to empty string"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"version")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"optional version for the documentation. 
If None,the version will be set to empty string"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"contact")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[Dict[str, str]]")),(0,r.kt)("td",{parentName:"tr",align:null},"optional contact for the documentation. If None, thecontact will be set to placeholder values:name='Author' url=HttpUrl(' ",(0,r.kt)("a",{parentName:"td",href:"https://www.google.com"},"https://www.google.com")," ', ) email='",(0,r.kt)("a",{parentName:"td",href:"mailto:noreply@gmail.com"},"noreply@gmail.com"),"'"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[Dict[str, Any]]")),(0,r.kt)("td",{parentName:"tr",align:null},'dictionary describing kafka brokers used for settingthe bootstrap server when running the applicationa and forgenerating documentation. 
Defaults to { "localhost": { "url": "localhost", "description": "local kafka broker", "port": "9092", } }'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"root_path")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[pathlib.Path, str, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"path to where documentation will be created"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"lifespan")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]]")),(0,r.kt)("td",{parentName:"tr",align:null},"asynccontextmanager that is used for setting lifespan hooks.",(0,r.kt)("strong",{parentName:"td"},"aenter")," is called before app start and ",(0,r.kt)("strong",{parentName:"td"},"aexit")," after app stop.The lifespan is called whe application is started as async contextmanager, e.g.:",(0,r.kt)("inlineCode",{parentName:"td"},"async with kafka_app...")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. 
This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-producer-#")," (appended with a unique numberper instance)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied keys to bytesIf not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as ",(0,r.kt)("inlineCode",{parentName:"td"},"f(key),")," should return:class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied messagevalues to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),". 
If not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as",(0,r.kt)("inlineCode",{parentName:"td"},"f(value)"),", should return :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"acks")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"one of ",(0,r.kt)("inlineCode",{parentName:"td"},"0"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common:",(0,r.kt)("em",{parentName:"td"}," ",(0,r.kt)("inlineCode",{parentName:"em"},"0"),": Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1.")," ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),": The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),": The broker leader will wait for the full set of in-sync replicas to acknowledge the record. 
This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=1"),". If ",(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")," is:data:",(0,r.kt)("inlineCode",{parentName:"td"},"True")," defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=all")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<object object at 0x7ff10d5f9100>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"compression_type")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The compression type for all data generated bythe producer. Valid values are ",(0,r.kt)("inlineCode",{parentName:"td"},"gzip"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"snappy"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"lz4"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"zstd"),"or :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),".Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). 
Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_batch_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum size of buffered data per partition.After this amount :meth:",(0,r.kt)("inlineCode",{parentName:"td"},"send")," coroutine will block until batch isdrained.Default: 16384"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"16384"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms"),", producer will wait ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms - process_time"),".Default: 0 (i.e. no delay)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"0"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partitioner")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Callable used to determine which partitioneach message is assigned to. 
Called (after key serialization):",(0,r.kt)("inlineCode",{parentName:"td"},"partitioner(key_bytes, all_partitions, available_partitions)"),".The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", the message is delivered to a random partition(filtered to partitions with available leaders only, if possible)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_request_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. 
Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Produce request timeout in milliseconds.As it's sent as part of:class:",(0,r.kt)("inlineCode",{parentName:"td"},"~kafka.protocol.produce.ProduceRequest")," (it's a blockingcall), maximum waiting time can be up to ",(0,r.kt)("inlineCode",{parentName:"td"},"2 *request_timeout_ms"),".Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For moreinformation see :ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"When set to :data:",(0,r.kt)("inlineCode",{parentName:"td"},"True"),", the producer willensure that exactly one copy of each message is written in thestream. If :data:",(0,r.kt)("inlineCode",{parentName:"td"},"False"),", producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:",(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")," will be thrown.New in version 0.5.0."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Valid valuesare: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"group_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"name of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. 
If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message key and returns a deserialized key."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message value and returns a deserialized value."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_min_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Minimum amount of data the server shouldreturn for a fetch request, otherwise wait up to",(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of data the server shouldreturn for a fetch request. 
This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"52428800"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"500"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_partition_fetch_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of dataper-partition the server will return. The maximum total memoryused for a request ",(0,r.kt)("inlineCode",{parentName:"td"},"= #partitions * max_partition_fetch_bytes"),".This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. 
Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_records")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum number of records returned in asingle call to :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),". Defaults ",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", no limit."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_offset_reset")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A policy for resetting offsets on:exc:",(0,r.kt)("inlineCode",{parentName:"td"},".OffsetOutOfRangeError")," errors: ",(0,r.kt)("inlineCode",{parentName:"td"},"earliest")," will move to the oldestavailable message, ",(0,r.kt)("inlineCode",{parentName:"td"},"latest")," will move to the most recent, and",(0,r.kt)("inlineCode",{parentName:"td"},"none")," will raise an exception so you can handle this case.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"latest"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'latest'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_auto_commit")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"If true the consumer's offset will beperiodically committed in the background. 
Default: True."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_commit_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"milliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"5000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"check_crcs")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Automatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partition_assignment_strategy")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"List of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. 
Default: ","[:class:",(0,r.kt)("inlineCode",{parentName:"td"},".RoundRobinPartitionAssignor"),"]"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum allowed time between calls toconsume messages (e.g., :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),"). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See ",(0,r.kt)("inlineCode",{parentName:"td"},"KIP-62"),"_ for moreinformation. Default 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"rebalance_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to ",(0,r.kt)("inlineCode",{parentName:"td"},"max.poll.interval.ms")," configuration,but as ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka")," will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:",(0,r.kt)("inlineCode",{parentName:"td"},".ConsumerRebalanceListener")," to delay rebalacing. 
Defaultsto ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client group session and failure detectiontimeout. The consumer sends periodic heartbeats(",(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.If no hearts are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe ",(0,r.kt)("strong",{parentName:"td"},"broker")," configuration properties",(0,r.kt)("inlineCode",{parentName:"td"},"group.min.session.timeout.ms")," and ",(0,r.kt)("inlineCode",{parentName:"td"},"group.max.session.timeout.ms"),".Default: 10000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"10000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms"),", but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. 
Default: 3000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"3000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"consumer_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"maximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"200"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"exclude_internal_topics")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Whether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"isolation_level")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Controls how to read messages writtentransactionally.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed"),", :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returntransactional messages which have been committed.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")," (the default), :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. 
Hence, in",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," mode, :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," the seek_to_end method willreturn the LSO. See method docs below. Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'read_uncommitted'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_oauth_token_provider")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"OAuthBearer token provider instance. 
(See :mod:",(0,r.kt)("inlineCode",{parentName:"td"},"kafka.oauth.abstract"),").Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.benchmark"},"benchmark"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1108-L1159",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"benchmark(\n self, interval=1, sliding_window_size=None\n)\n")),(0,r.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"interval")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[int, datetime.timedelta]")),(0,r.kt)("td",{parentName:"tr",align:null},"Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. default: 1 - one second"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sliding_window_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[int]")),(0,r.kt)("td",{parentName:"tr",align:null},"The size of the sliding window to use to calculateaverage throughput. 
default: None - By default average throughput isnot calculated"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.consumes"},"consumes"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L474-L557",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"consumes(\n self,\n topic=None,\n decoder='json',\n executor=None,\n brokers=None,\n prefix='on_',\n description=None,\n loop=None,\n bootstrap_servers='localhost',\n client_id='aiokafka-0.8.1',\n group_id=None,\n key_deserializer=None,\n value_deserializer=None,\n fetch_max_wait_ms=500,\n fetch_max_bytes=52428800,\n fetch_min_bytes=1,\n max_partition_fetch_bytes=1048576,\n request_timeout_ms=40000,\n retry_backoff_ms=100,\n auto_offset_reset='latest',\n enable_auto_commit=True,\n auto_commit_interval_ms=5000,\n check_crcs=True,\n metadata_max_age_ms=300000,\n partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),\n max_poll_interval_ms=300000,\n rebalance_timeout_ms=None,\n session_timeout_ms=10000,\n heartbeat_interval_ms=3000,\n consumer_timeout_ms=200,\n max_poll_records=None,\n ssl_context=None,\n security_protocol='PLAINTEXT',\n api_version='auto',\n exclude_internal_topics=True,\n connections_max_idle_ms=540000,\n isolation_level='read_uncommitted',\n sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n)\n")),(0,r.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,r.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and 
documentation."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Kafka topic that the consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefix"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"decoder")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. 
It also accepts custom decoder function."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'json'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"executor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},'Type of executor to choose for consuming tasks. Avaliable optionsare "SequentialExecutor" and "DynamicTaskExecutor". The default option is"SequentialExecutor" which will execute the consuming tasks sequentially.If the consuming tasks have high latency it is recommended to use"DynamicTaskExecutor" which will wrap the consuming functions into tasksand run them in on asyncio loop in background. This comes with a cost ofincreased overhead so use it only in cases when your consume functions havehigh latency such as database queries or some other type of networking.'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"prefix")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'Prefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: "on_". 
If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'on_'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional description of the consuming function async docs.If not provided, consuming function ",(0,r.kt)("strong",{parentName:"td"},"doc")," attr will be used."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a ",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," string (or list of",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. 
If no servers arespecified, will default to ",(0,r.kt)("inlineCode",{parentName:"td"},"localhost:9092"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'localhost'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~.consumer.group_coordinator.GroupCoordinator"),"for logging with respect to consumer group administration. Default:",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-{version}")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'aiokafka-0.8.1'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"group_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"name of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. 
If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message key and returns a deserialized key."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message value and returns a deserialized value."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_min_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Minimum amount of data the server shouldreturn for a fetch request, otherwise wait up to",(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of data the server shouldreturn for a fetch request. 
This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"52428800"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"500"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_partition_fetch_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of dataper-partition the server will return. The maximum total memoryused for a request ",(0,r.kt)("inlineCode",{parentName:"td"},"= #partitions * max_partition_fetch_bytes"),".This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. 
Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_records")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum number of records returned in asingle call to :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),". Defaults ",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", no limit."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client request timeout in milliseconds.Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. 
Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_offset_reset")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A policy for resetting offsets on:exc:",(0,r.kt)("inlineCode",{parentName:"td"},".OffsetOutOfRangeError")," errors: ",(0,r.kt)("inlineCode",{parentName:"td"},"earliest")," will move to the oldestavailable message, ",(0,r.kt)("inlineCode",{parentName:"td"},"latest")," will move to the most recent, and",(0,r.kt)("inlineCode",{parentName:"td"},"none")," will raise an exception so you can handle this case.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"latest"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'latest'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_auto_commit")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"If true the consumer's offset will beperiodically committed in the background. Default: True."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_commit_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"milliseconds between automaticoffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"5000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"check_crcs")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Automatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partition_assignment_strategy")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"List of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. 
Default: ","[:class:",(0,r.kt)("inlineCode",{parentName:"td"},".RoundRobinPartitionAssignor"),"]"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum allowed time between calls toconsume messages (e.g., :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),"). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See ",(0,r.kt)("inlineCode",{parentName:"td"},"KIP-62"),"_ for moreinformation. Default 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"rebalance_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to ",(0,r.kt)("inlineCode",{parentName:"td"},"max.poll.interval.ms")," configuration,but as ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka")," will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:",(0,r.kt)("inlineCode",{parentName:"td"},".ConsumerRebalanceListener")," to delay rebalacing. 
Defaultsto ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client group session and failure detectiontimeout. The consumer sends periodic heartbeats(",(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.If no hearts are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe ",(0,r.kt)("strong",{parentName:"td"},"broker")," configuration properties",(0,r.kt)("inlineCode",{parentName:"td"},"group.min.session.timeout.ms")," and ",(0,r.kt)("inlineCode",{parentName:"td"},"group.max.session.timeout.ms"),".Default: 10000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"10000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms"),", but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. 
Default: 3000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"3000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"consumer_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"maximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"200"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.:class:",(0,r.kt)("inlineCode",{parentName:"td"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For more information see:ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),". Default: None."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"exclude_internal_topics")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Whether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying ",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"isolation_level")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Controls how to read messages writtentransactionally.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed"),", :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returntransactional messages which have been committed.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")," (the default), :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, in",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," mode, :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," the seek_to_end method willreturn the LSO. See method docs below. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'read_uncommitted'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Valid values are:",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: 
None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_oauth_token_provider")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"OAuthBearer token provider instance. (See :mod:",(0,r.kt)("inlineCode",{parentName:"td"},"kafka.oauth.abstract"),").Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], 
None]]]")),(0,r.kt)("td",{parentName:"tr",align:null},": A function returning the same function")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.create_docs"},"create_docs"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L938-L964",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"create_docs(\n self\n)\n")),(0,r.kt)("p",null,"Create the asyncapi documentation based on the configured consumers and producers."),(0,r.kt)("p",null,"This function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers."),(0,r.kt)("p",null,"Note:\nThe asyncapi documentation is saved to the location specified by the ",(0,r.kt)("inlineCode",{parentName:"p"},"_asyncapi_path"),"\nattribute of the FastKafka instance."),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.create_mocks"},"create_mocks"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1026-L1104",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"create_mocks(\n self\n)\n")),(0,r.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.fastapi_lifespan"},"fastapi_lifespan"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1163-L1182",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"fastapi_lifespan(\n self, kafka_broker_name\n)\n")),(0,r.kt)("p",null,"Method for managing the lifespan of a FastAPI application with a specific 
Kafka broker."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_broker_name")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The name of the Kafka broker to start FastKafka"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Lifespan function to use for initializing FastAPI")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.get_topics"},"get_topics"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L663-L672",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"get_topics(\n self\n)\n")),(0,r.kt)("p",null,"Get all topics for both producing and 
consuming."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Iterable[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"A set of topics for both producing and consuming.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.is_started"},"is_started"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L308-L319",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"@property\nis_started(\n self\n)\n")),(0,r.kt)("p",null,"Property indicating whether the FastKafka object is started."),(0,r.kt)("p",null,"The is_started property indicates if the FastKafka object is currently\nin a started state. 
This implies that all background tasks, producers,\nand consumers have been initiated, and the object is successfully connected\nto the Kafka broker."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"True if the object is started, False otherwise.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.produces"},"produces"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L582-L659",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"produces(\n self,\n topic=None,\n encoder='json',\n prefix='to_',\n brokers=None,\n description=None,\n loop=None,\n bootstrap_servers='localhost',\n client_id=None,\n metadata_max_age_ms=300000,\n request_timeout_ms=40000,\n api_version='auto',\n acks=<object object at 0x7ff10d5f9100>,\n key_serializer=None,\n value_serializer=None,\n compression_type=None,\n max_batch_size=16384,\n partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>,\n max_request_size=1048576,\n linger_ms=0,\n send_backoff_ms=100,\n retry_backoff_ms=100,\n security_protocol='PLAINTEXT',\n ssl_context=None,\n connections_max_idle_ms=540000,\n enable_idempotence=False,\n transactional_id=None,\n transaction_timeout_ms=60000,\n sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n)\n")),(0,r.kt)("p",null,"Decorator registering the callback 
called when delivery report for a produced message is received"),(0,r.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"encoder")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, Callable[[pydantic.main.BaseModel], bytes]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. 
It also accepts custom encoder function."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'json'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"prefix")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'Prefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: "to_". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'to_'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional description of the producing function async docs.If not provided, producing function ",(0,r.kt)("strong",{parentName:"td"},"doc")," attr will be 
used."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a ",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," string or list of",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list. It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to ",(0,r.kt)("inlineCode",{parentName:"td"},"localhost:9092"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'localhost'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. 
This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-producer-#")," (appended with a unique numberper instance)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied keys to bytesIf not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as ",(0,r.kt)("inlineCode",{parentName:"td"},"f(key),")," should return:class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied messagevalues to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),". 
If not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as",(0,r.kt)("inlineCode",{parentName:"td"},"f(value)"),", should return :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"acks")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"one of ",(0,r.kt)("inlineCode",{parentName:"td"},"0"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common:",(0,r.kt)("em",{parentName:"td"}," ",(0,r.kt)("inlineCode",{parentName:"em"},"0"),": Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1.")," ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),": The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),": The broker leader will wait for the full set of in-sync replicas to acknowledge the record. 
This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=1"),". If ",(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")," is:data:",(0,r.kt)("inlineCode",{parentName:"td"},"True")," defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=all")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<object object at 0x7ff10d5f9100>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"compression_type")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The compression type for all data generated bythe producer. Valid values are ",(0,r.kt)("inlineCode",{parentName:"td"},"gzip"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"snappy"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"lz4"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"zstd"),"or :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),".Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). 
Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_batch_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum size of buffered data per partition.After this amount :meth:",(0,r.kt)("inlineCode",{parentName:"td"},"send")," coroutine will block until batch isdrained.Default: 16384"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"16384"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms"),", producer will wait ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms - process_time"),".Default: 0 (i.e. no delay)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"0"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partitioner")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Callable used to determine which partitioneach message is assigned to. 
Called (after key serialization):",(0,r.kt)("inlineCode",{parentName:"td"},"partitioner(key_bytes, all_partitions, available_partitions)"),".The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", the message is delivered to a random partition(filtered to partitions with available leaders only, if possible)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_request_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. 
Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Produce request timeout in milliseconds.As it's sent as part of:class:",(0,r.kt)("inlineCode",{parentName:"td"},"~kafka.protocol.produce.ProduceRequest")," (it's a blockingcall), maximum waiting time can be up to ",(0,r.kt)("inlineCode",{parentName:"td"},"2 *request_timeout_ms"),".Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For moreinformation see :ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"When set to :data:",(0,r.kt)("inlineCode",{parentName:"td"},"True"),", the producer willensure that exactly one copy of each message is written in thestream. If :data:",(0,r.kt)("inlineCode",{parentName:"td"},"False"),", producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:",(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")," will be thrown.New in version 0.5.0."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Valid valuesare: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], 
fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]")),(0,r.kt)("td",{parentName:"tr",align:null},": A function returning the same function")))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,r.kt)("td",{parentName:"tr",align:null},"when needed")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.run_in_background"},"run_in_background"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L676-L709",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"run_in_background(\n self\n)\n")),(0,r.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,r.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,r.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is 
triggered."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]")),(0,r.kt)("td",{parentName:"tr",align:null},"A decorator function that takes a background task as an input and stores it to be run in the backround.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.set_kafka_broker"},"set_kafka_broker"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L321-L337",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"set_kafka_broker(\n self, kafka_broker_name\n)\n")),(0,r.kt)("p",null,"Sets the Kafka broker to start FastKafka with"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_broker_name")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The name of the Kafka broker to start 
FastKafka"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,r.kt)("td",{parentName:"tr",align:null},"If the provided kafka_broker_name is not found in dictionary of kafka_brokers")))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a7914a5c.64fb6d59.js b/assets/js/a7914a5c.64fb6d59.js new file mode 100644 index 0000000..98a4ae1 --- /dev/null +++ b/assets/js/a7914a5c.64fb6d59.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4842],{3905:(a,e,t)=>{t.d(e,{Zo:()=>c,kt:()=>d});var n=t(7294);function o(a,e,t){return e in a?Object.defineProperty(a,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):a[e]=t,a}function s(a,e){var t=Object.keys(a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(a);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),t.push.apply(t,n)}return t}function r(a){for(var e=1;e<arguments.length;e++){var t=null!=arguments[e]?arguments[e]:{};e%2?s(Object(t),!0).forEach((function(e){o(a,e,t[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(t,e))}))}return a}function i(a,e){if(null==a)return{};var t,n,o=function(a,e){if(null==a)return{};var t,n,o={},s=Object.keys(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||(o[t]=a[t]);return o}(a,e);if(Object.getOwnPropertySymbols){var 
s=Object.getOwnPropertySymbols(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(a,t)&&(o[t]=a[t])}return o}var p=n.createContext({}),l=function(a){var e=n.useContext(p),t=e;return a&&(t="function"==typeof a?a(e):r(r({},e),a)),t},c=function(a){var e=l(a.components);return n.createElement(p.Provider,{value:e},a.children)},k="mdxType",u={inlineCode:"code",wrapper:function(a){var e=a.children;return n.createElement(n.Fragment,{},e)}},f=n.forwardRef((function(a,e){var t=a.components,o=a.mdxType,s=a.originalType,p=a.parentName,c=i(a,["components","mdxType","originalType","parentName"]),k=l(t),f=o,d=k["".concat(p,".").concat(f)]||k[f]||u[f]||s;return t?n.createElement(d,r(r({ref:e},c),{},{components:t})):n.createElement(d,r({ref:e},c))}));function d(a,e){var t=arguments,o=e&&e.mdxType;if("string"==typeof a||o){var s=t.length,r=new Array(s);r[0]=f;var i={};for(var p in e)hasOwnProperty.call(e,p)&&(i[p]=e[p]);i.originalType=a,i[k]="string"==typeof a?a:o,r[1]=i;for(var l=2;l<s;l++)r[l]=t[l];return n.createElement.apply(null,r)}return n.createElement.apply(null,t)}f.displayName="MDXCreateElement"},2804:(a,e,t)=>{t.r(e),t.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>u,frontMatter:()=>s,metadata:()=>i,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},r="FastKafka",i={unversionedId:"index",id:"version-0.7.1/index",title:"FastKafka",description:"Effortless Kafka integration for your web services",source:"@site/versioned_docs/version-0.7.1/index.md",sourceDirName:".",slug:"/",permalink:"/docs/0.7.1/",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",next:{title:"@consumes basics",permalink:"/docs/0.7.1/guides/Guide_11_Consumes_Basics"}},p={},l=[{value:"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50",id:"-stay-in-touch-",level:4},{value:"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately 
\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d",id:"-we-were-busy-lately-",level:4},{value:"Install",id:"install",level:2},{value:"Tutorial",id:"tutorial",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2},{value:"License",id:"license",level:2}],c={toc:l},k="wrapper";function u(a){let{components:e,...t}=a;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:e,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka"},"FastKafka"),(0,o.kt)("b",null,"Effortless Kafka integration for your web services"),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/v/fastkafka.png",alt:"PyPI"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/dm/fastkafka.png",alt:"PyPI -\nDownloads"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/pyversions/fastkafka.png",alt:"PyPI - Python\nVersion"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml",alt:"GitHub Workflow\nStatus"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg",alt:"CodeQL"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg",alt:"Dependency\nReview"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/license/airtai/fastkafka.png",alt:"GitHub"})),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building 
asynchronous services that interact with\nKafka topics. Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-stay-in-touch-"},"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50"),(0,o.kt)("p",null,"Please show your support and stay in touch by:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"giving our ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/"},"GitHub repository")," a\nstar, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"joining our ",(0,o.kt)("a",{parentName:"p",href:"https://discord.gg/CJWmYpyFbc"},"Discord server"),"."))),(0,o.kt)("p",null,"Your support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. 
Thank you for your\nsupport!"),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-we-were-busy-lately-"},"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg",alt:"Activity",title:"Repobeats analytics image"})),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install base version of ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka")," with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("p",null,"To install fastkafka with testing features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test]\n")),(0,o.kt)("p",null,"To install fastkafka with asyncapi docs please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[docs]\n")),(0,o.kt)("p",null,"To install fastkafka with all the features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test,docs]\n")),(0,o.kt)("h2",{id:"tutorial"},"Tutorial"),(0,o.kt)("p",null,"You can start an interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/index.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open in Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"To demonstrate FastKafka simplicity of using ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes"),"\ndecorators, we will focus on a simple 
app."),(0,o.kt)("p",null,"The app will consume jsons containig positive floats from one topic, log\nthem and then produce incremented values to another topic."),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines one ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," mesage class. This Class will model the\nconsumed and produced data in our app demo, it contains one\n",(0,o.kt)("inlineCode",{parentName:"p"},"NonNegativeFloat")," field ",(0,o.kt)("inlineCode",{parentName:"p"},"data"),' that will be logged and \u201cprocessed"\nbefore being produced to another topic.'),(0,o.kt)("p",null,"These message class will be used to parse and validate incoming data in\nKafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass Data(BaseModel):\n data: NonNegativeFloat = Field(\n ..., example=0.5, description="Float data example"\n )\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. 
Each entry specifies the URL,\nport, and other details of a Kafka broker. This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("p",null,"We will also import and create a logger so that we can log the incoming\ndata in our consuming function."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from logging import getLogger\nfrom fastkafka import FastKafka\n\nlogger = getLogger("Demo Kafka app")\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," message class. Specifying the type of the\nsingle argument is instructing the Pydantic to use ",(0,o.kt)("inlineCode",{parentName:"p"},"Data.parse_raw()"),"\non the consumed message before passing it to the user defined function\n",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_output_data"),' function,\nwhich specifies that this function should produce a message to the\n\u201coutput_data" Kafka topic whenever it is called. The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_output_data"),"\nfunction takes a single float argument ",(0,o.kt)("inlineCode",{parentName:"p"},"data"),". 
It it increments the\ndata returns it wrapped in a ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," object. The framework will call\nthe ",(0,o.kt)("inlineCode",{parentName:"p"},'Data.json().encode("utf-8")')," function on the returned value and\nproduce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: Data):\n logger.info(f"Got data: {msg.data}")\n await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic="output_data")\nasync def to_output_data(data: float) -> Data:\n processed_data = Data(data=data+1.0)\n return processed_data\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts InMemory implementation of Kafka\nbroker."),(0,o.kt)("p",null,"The Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.testing import Tester\n\nmsg = Data(\n data=0.1,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send Data message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with incremented data in output_data topic\n await tester.awaited_mocks.on_output_data.assert_awaited_with(\n Data(data=1.1), timeout=2\n )\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: 
InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] Demo Kafka app: Got data: 0.1\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a simple fastkafka application. 
The app will consume the\n",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," from the ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic, log it and produce the incremented\ndata to ",(0,o.kt)("inlineCode",{parentName:"p"},"output_data")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purposes")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent Data message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed service has reacted to Data\nmessage"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin faskafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass Data(BaseModel):\n data: NonNegativeFloat = Field(\n ..., example=0.5, description="Float data example"\n )\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n 
kafka_brokers=kafka_brokers,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: Data):\n logger.info(f"Got data: {msg.data}")\n await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic="output_data")\nasync def to_output_data(data: float) -> Data:\n processed_data = Data(data=data+1.0)\n return processed_data\n')),(0,o.kt)("p",null,"To run the service, use the FastKafka CLI command and pass the module\n(in this case, the file where the app implementation is located) and the\napp simbol to the command."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see the following output in your\ncommand line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1504]: 23-05-31 11:36:45.956 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1506]: 23-05-31 11:36:46.042 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n[1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]\n[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]\n[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...\n23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...\n[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.\n23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"AsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs 
install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation programatically you just need to call the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.\n23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /content/asyncapi/docs.\n")),(0,o.kt)("p",null,"This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all your\ndocumentation will be saved. You can check out the content of it with:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxr-xr-x 4 root root 4096 May 31 11:38 docs\ndrwxr-xr-x 2 root root 4096 May 31 11:38 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: \'/content/asyncapi/spec/asyncapi.yml\'\n23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at \'asyncapi/docs\'\n23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of \'$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write\'\n\nDone! 
\u2728\nCheck out your shiny new generated files at /content/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n127.0.0.1 - - [31/May/2023 11:39:14] "GET / HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/global.min.css HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /js/asyncapi-ui.min.js HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/asyncapi.min.css HTTP/1.1" 200 -\nInterupting serving of documentation and cleaning up...\n')),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," 
are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})),(0,o.kt)("h2",{id:"license"},"License"),(0,o.kt)("p",null,"FastKafka is licensed under the Apache License 2.0"),(0,o.kt)("p",null,"A permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code."),(0,o.kt)("p",null,"The full text of the license can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE"},"here"),"."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a80d168f.2854157b.js b/assets/js/a80d168f.2854157b.js new file mode 100644 index 0000000..40fc92e --- /dev/null +++ b/assets/js/a80d168f.2854157b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3725],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>m});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function i(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?s(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function 
r(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},s=Object.keys(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),l=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof e?e(a):i(i({},a),e)),t},c=function(e){var a=l(e.components);return n.createElement(p.Provider,{value:a},e.children)},k="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},f=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,s=e.originalType,p=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),k=l(t),f=o,m=k["".concat(p,".").concat(f)]||k[f]||d[f]||s;return t?n.createElement(m,i(i({ref:a},c),{},{components:t})):n.createElement(m,i({ref:a},c))}));function m(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=t.length,i=new Array(s);i[0]=f;var r={};for(var p in a)hasOwnProperty.call(a,p)&&(r[p]=a[p]);r.originalType=e,r[k]="string"==typeof e?e:o,i[1]=r;for(var l=2;l<s;l++)i[l]=t[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,t)}f.displayName="MDXCreateElement"},760:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>i,default:()=>d,frontMatter:()=>s,metadata:()=>r,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},i="FastKafka tutorial",r={unversionedId:"guides/Guide_00_FastKafka_Demo",id:"version-0.7.1/guides/Guide_00_FastKafka_Demo",title:"FastKafka tutorial",description:"FastKafka is a powerful and 
easy-to-use",source:"@site/versioned_docs/version-0.7.1/guides/Guide_00_FastKafka_Demo.md",sourceDirName:"guides",slug:"/guides/Guide_00_FastKafka_Demo",permalink:"/docs/0.7.1/guides/Guide_00_FastKafka_Demo",draft:!1,tags:[],version:"0.7.1",frontMatter:{}},p={},l=[{value:"Install",id:"install",level:2},{value:"Running in Colab",id:"running-in-colab",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2}],c={toc:l},k="wrapper";function d(e){let{components:a,...t}=e;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka-tutorial"},"FastKafka tutorial"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"try:\n import fastkafka\nexcept:\n ! 
pip install fastkafka\n")),(0,o.kt)("h2",{id:"running-in-colab"},"Running in Colab"),(0,o.kt)("p",null,"You can start this interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open In Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"Here is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic."),(0,o.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,o.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. The following call downloads\nthe dataset and trains the model."),(0,o.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/types/#constrained-types"},(0,o.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,o.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,o.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,o.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,o.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts Kafka broker and zookeeper."),(0,o.kt)("p",null,"Before running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka testing install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop 
shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a Iris classification model and encapulated it into our\nfastkafka application. 
The app will consume the IrisInputData from the\n",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,o.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purpuoses")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent IrisInputData message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to IrisInputData message"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin ",(0,o.kt)("inlineCode",{parentName:"p"},"faskafka run")," CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal 
length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("p",null,"To run the service, you will need a running Kafka broker on localhost as\nspecified in the ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")," parameter above. 
We can start the Kafka\nbroker locally using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),".\nNotice that the same happens automatically in the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\nas shown above."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n'127.0.0.1:9092'\n")),(0,o.kt)("p",null,"Then, we start the FastKafka service by running the following command in\nthe folder where the ",(0,o.kt)("inlineCode",{parentName:"p"},"application.py")," file is located:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=2 --kafka-broker 
localhost application:kafka_app\n")),(0,o.kt)("p",null,"In the above command, we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--num-workers")," option to specify how many\nworkers to launch and we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--kafka-broker")," option to specify which\nkafka broker configuration to use from earlier specified ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n^C\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
stopped.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n")),(0,o.kt)("p",null,"You need to interupt running of the cell above by selecting\n",(0,o.kt)("inlineCode",{parentName:"p"},"Runtime->Interupt execution")," on the toolbar above."),(0,o.kt)("p",null,"Finally, we can stop the local Kafka Broker:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"When running in Colab, we need to update Node.js first:"),(0,o.kt)("p",null,"We need to install all dependancies for the generator using the\nfollowing command line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation 
programatically you just need to call the\nfolloving command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n")),(0,o.kt)("p",null,". This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\ndrwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n^C\nInterupting serving of documentation and cleaning up...\n")),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n 
bootstrap_servers="localhost:9092",\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/a9ab9f8f.bdfd8b0f.js b/assets/js/a9ab9f8f.bdfd8b0f.js new file mode 100644 index 0000000..6330cbf --- /dev/null +++ b/assets/js/a9ab9f8f.bdfd8b0f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3979],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function s(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?r(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function l(e,t){if(null==e)return{};var n,o,a=function(e,t){if(null==e)return{};var n,o,a={},r=Object.keys(e);for(o=0;o<r.length;o++)n=r[o],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o<r.length;o++)n=r[o],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=o.createContext({}),c=function(e){var t=o.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},p=function(e){var t=c(e.components);return o.createElement(i.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},d=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,r=e.originalType,i=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),u=c(n),d=a,f=u["".concat(i,".").concat(d)]||u[d]||m[d]||r;return n?o.createElement(f,s(s({ref:t},p),{},{components:n})):o.createElement(f,s({ref:t},p))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,s=new Array(r);s[0]=d;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[u]="string"==typeof e?e:a,s[1]=l;for(var c=2;c<r;c++)s[c]=n[c];return o.createElement.apply(null,s)}return o.createElement.apply(null,n)}d.displayName="MDXCreateElement"},1034:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>m,frontMatter:()=>r,metadata:()=>l,toc:()=>c});var o=n(7462),a=(n(7294),n(3905));const r={},s="Batch consuming",l={unversionedId:"guides/Guide_12_Batch_Consuming",id:"version-0.8.0/guides/Guide_12_Batch_Consuming",title:"Batch consuming",description:"If you 
want to consume data in batches @consumes decorator makes that",source:"@site/versioned_docs/version-0.8.0/guides/Guide_12_Batch_Consuming.md",sourceDirName:"guides",slug:"/guides/Guide_12_Batch_Consuming",permalink:"/docs/guides/Guide_12_Batch_Consuming",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@consumes basics",permalink:"/docs/guides/Guide_11_Consumes_Basics"},next:{title:"@produces basics",permalink:"/docs/guides/Guide_21_Produces_Basics"}},i={},c=[{value:"Consume function with batching",id:"consume-function-with-batching",level:2},{value:"App example",id:"app-example",level:2},{value:"Send the messages to kafka topic",id:"send-the-messages-to-kafka-topic",level:2}],p={toc:c},u="wrapper";function m(e){let{components:t,...n}=e;return(0,a.kt)(u,(0,o.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"batch-consuming"},"Batch consuming"),(0,a.kt)("p",null,"If you want to consume data in batches ",(0,a.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator makes that\npossible for you. By typing a consumed msg object as a ",(0,a.kt)("inlineCode",{parentName:"p"},"list")," of\nmessages the consumer will call your consuming function with a batch of\nmessages consumed from a single partition. 
Let\u2019s demonstrate that now."),(0,a.kt)("h2",{id:"consume-function-with-batching"},"Consume function with batching"),(0,a.kt)("p",null,"To consume messages in batches, you need to wrap you message type into a\nlist and the ",(0,a.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will take care of the rest for you.\nYour consumes function will be called with batches grouped by partition\nnow."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(auto_offset_reset="earliest")\nasync def on_hello_world(msg: List[HelloWorld]):\n logger.info(f"Got msg batch: {msg}")\n')),(0,a.kt)("h2",{id:"app-example"},"App example"),(0,a.kt)("p",null,"We will modify the app example from ",(0,a.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_11_Consumes_Basics"},"@consumes\nbasics")," guide to consume\n",(0,a.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages batch. The final app will look like this (make\nsure you replace the ",(0,a.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,a.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom typing import List\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(auto_offset_reset="earliest")\nasync def on_hello_world(msg: List[HelloWorld]):\n logger.info(f"Got msg batch: 
{msg}")\n')),(0,a.kt)("h2",{id:"send-the-messages-to-kafka-topic"},"Send the messages to kafka topic"),(0,a.kt)("p",null,"Lets send a couple of ",(0,a.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages to the ",(0,a.kt)("em",{parentName:"p"},"hello_world")," topic\nand check if our consumer kafka application has logged the received\nmessages batch. In your terminal, run the following command at least two\ntimes to create multiple messages in your kafka queue:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre"},'echo { ^"msg^": ^"Hello world^" }\n')),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell"},'echo { ^"msg^": ^"Hello world^" } | kafka-console-producer.bat --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,a.kt)("p",null,"Now we can run the app. Copy the code of the example app in\nconsumer_example.py and run it by running"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,a.kt)("p",null,"You should see the your Kafka messages being logged in batches by your\nconsumer."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/aa946361.9a9d22e0.js b/assets/js/aa946361.9a9d22e0.js new file mode 100644 index 0000000..227502b --- /dev/null +++ b/assets/js/aa946361.9a9d22e0.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5171],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var l=n.createContext({}),p=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},c=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=p(a),k=r,f=d["".concat(l,".").concat(k)]||d[k]||u[k]||o;return a?n.createElement(f,s(s({ref:t},c),{},{components:a})):n.createElement(f,s({ref:t},c))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,s[1]=i;for(var p=2;p<o;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},5239:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},s="Batch producing",i={unversionedId:"guides/Guide_23_Batch_Producing",id:"version-0.7.0/guides/Guide_23_Batch_Producing",title:"Batch producing",description:"If you 
want to send your data in batches @produces decorator makes",source:"@site/versioned_docs/version-0.7.0/guides/Guide_23_Batch_Producing.md",sourceDirName:"guides",slug:"/guides/Guide_23_Batch_Producing",permalink:"/docs/0.7.0/guides/Guide_23_Batch_Producing",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Defining a partition key",permalink:"/docs/0.7.0/guides/Guide_22_Partition_Keys"},next:{title:"Lifespan Events",permalink:"/docs/0.7.0/guides/Guide_05_Lifespan_Handler"}},l={},p=[{value:"Return a batch from the producing function",id:"return-a-batch-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the batch was sent to the Kafka topic with the defined key",id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key",level:2},{value:"Batch key",id:"batch-key",level:2},{value:"Check if the batch was sent to the Kafka topic",id:"check-if-the-batch-was-sent-to-the-kafka-topic",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"batch-producing"},"Batch producing"),(0,r.kt)("p",null,"If you want to send your data in batches ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator makes\nthat possible for you. 
By returning a ",(0,r.kt)("inlineCode",{parentName:"p"},"list")," of messages you want to\nsend in a batch the producer will collect the messages and send them in\na batch to a Kafka broker."),(0,r.kt)("p",null,"This guide will demonstrate how to use this feature."),(0,r.kt)("h2",{id:"return-a-batch-from-the-producing-function"},"Return a batch from the producing function"),(0,r.kt)("p",null,"To define a batch that you want to produce to Kafka topic, you need to\nreturn the ",(0,r.kt)("inlineCode",{parentName:"p"},"List")," of the messages that you want to be batched from your\nproducing function."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n")),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class batch\nthat is created from a list of msgs we passed into our producing\nfunction."),(0,r.kt)("p",null,'Lets also prepare a backgound task that will send a batch of \u201chello\nworld" messages when the app starts.'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n')),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_21_Produces_Basics"},"@producer\nbasics")," guide to return the\n",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," batch. 
The final app will look like this (make sure you\nreplace the ",(0,r.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\n[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key"},"Check if the batch was sent to the Kafka topic with the defined key"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the batch of messages in your topic."),(0,r.kt)("h2",{id:"batch-key"},"Batch key"),(0,r.kt)("p",null,"To define a key for your batch like in ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a partition\nkey")," guide you can wrap the\nreturning value in a ",(0,r.kt)("inlineCode",{parentName:"p"},"KafkaEvent")," class. To learn more about defining a\npartition ke and ",(0,r.kt)("inlineCode",{parentName:"p"},"KafkaEvent")," class, please, have a look at ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a\npartition key")," guide."),(0,r.kt)("p",null,"Let\u2019s demonstrate that."),(0,r.kt)("p",null,"To define a key, we just need to modify our producing function, like\nthis:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("p",null,"Now our app looks like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def 
prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic"},"Check if the batch was sent to the Kafka topic"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic, containing a defined key. In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the batch of messages with the defined key in your topic."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/aacd1d40.c4e40d4c.js b/assets/js/aacd1d40.c4e40d4c.js new file mode 100644 index 0000000..e6a47ab --- /dev/null +++ b/assets/js/aacd1d40.c4e40d4c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5394],{5745:a=>{a.exports=JSON.parse('{"name":"docusaurus-plugin-content-pages","id":"default"}')}}]); \ No newline at end of file diff --git a/assets/js/ac02e102.06e01491.js b/assets/js/ac02e102.06e01491.js new file mode 100644 index 0000000..bbce5ae --- /dev/null +++ b/assets/js/ac02e102.06e01491.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9942],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>m});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var 
n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function i(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?s(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function r(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},s=Object.keys(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),l=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof e?e(a):i(i({},a),e)),t},c=function(e){var a=l(e.components);return n.createElement(p.Provider,{value:a},e.children)},k="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},f=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,s=e.originalType,p=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),k=l(t),f=o,m=k["".concat(p,".").concat(f)]||k[f]||d[f]||s;return t?n.createElement(m,i(i({ref:a},c),{},{components:t})):n.createElement(m,i({ref:a},c))}));function m(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=t.length,i=new Array(s);i[0]=f;var r={};for(var p in a)hasOwnProperty.call(a,p)&&(r[p]=a[p]);r.originalType=e,r[k]="string"==typeof e?e:o,i[1]=r;for(var l=2;l<s;l++)i[l]=t[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,t)}f.displayName="MDXCreateElement"},3768:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>i,default:()=>d,frontMatter:()=>s,metadata:()=>r,toc:()=>l});var 
n=t(7462),o=(t(7294),t(3905));const s={},i="FastKafka tutorial",r={unversionedId:"guides/Guide_00_FastKafka_Demo",id:"version-0.6.0/guides/Guide_00_FastKafka_Demo",title:"FastKafka tutorial",description:"FastKafka is a powerful and easy-to-use",source:"@site/versioned_docs/version-0.6.0/guides/Guide_00_FastKafka_Demo.md",sourceDirName:"guides",slug:"/guides/Guide_00_FastKafka_Demo",permalink:"/docs/0.6.0/guides/Guide_00_FastKafka_Demo",draft:!1,tags:[],version:"0.6.0",frontMatter:{}},p={},l=[{value:"Install",id:"install",level:2},{value:"Running in Colab",id:"running-in-colab",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2}],c={toc:l},k="wrapper";function d(e){let{components:a,...t}=e;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka-tutorial"},"FastKafka tutorial"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install it with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"try:\n import fastkafka\nexcept:\n ! 
pip install fastkafka\n")),(0,o.kt)("h2",{id:"running-in-colab"},"Running in Colab"),(0,o.kt)("p",null,"You can start this interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open In Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"Here is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka topic."),(0,o.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,o.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the preditions using FastKafka. The following call downloads\nthe dataset and trains the model."),(0,o.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/types/#constrained-types"},(0,o.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,o.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,o.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\ngenerating the documentation only and it is not being checked by the\nactual server."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,o.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,o.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts Kafka broker and zookeeper."),(0,o.kt)("p",null,"Before running tests, we have to install Java runtime and Apache Kafka\nlocally. 
To simplify the process, we provide the following convenience\ncommand:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka testing install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n# Start Tester app and create local Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop 
shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a Iris classification model and encapulated it into our\nfastkafka application. 
The app will consume the IrisInputData from the\n",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,o.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purpuoses")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent IrisInputData message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to IrisInputData message"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin ",(0,o.kt)("inlineCode",{parentName:"p"},"faskafka run")," CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal 
length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("p",null,"To run the service, you will need a running Kafka broker on localhost as\nspecified in the ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")," parameter above. 
We can start the Kafka\nbroker locally using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),".\nNotice that the same happens automatically in the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\nas shown above."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n'127.0.0.1:9092'\n")),(0,o.kt)("p",null,"Then, we start the FastKafka service by running the following command in\nthe folder where the ",(0,o.kt)("inlineCode",{parentName:"p"},"application.py")," file is located:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=2 --kafka-broker 
localhost application:kafka_app\n")),(0,o.kt)("p",null,"In the above command, we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--num-workers")," option to specify how many\nworkers to launch and we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--kafka-broker")," option to specify which\nkafka broker configuration to use from earlier specified ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]\n[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]\n^C\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
stopped.\n[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...\n")),(0,o.kt)("p",null,"You need to interupt running of the cell above by selecting\n",(0,o.kt)("inlineCode",{parentName:"p"},"Runtime->Interupt execution")," on the toolbar above."),(0,o.kt)("p",null,"Finally, we can stop the local Kafka Broker:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"When running in Colab, we need to update Node.js first:"),(0,o.kt)("p",null,"We need to install all dependancies for the generator using the\nfollowing command line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation 
programatically you just need to call the\nfolloving command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n")),(0,o.kt)("p",null,". This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all\nyour documentation will be saved. You can check out the content of it\nwith:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs\ndrwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n^C\nInterupting serving of documentation and cleaning up...\n")),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n 
bootstrap_servers="localhost:9092",\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/ae1efb81.ef2d54c6.js b/assets/js/ae1efb81.ef2d54c6.js new file mode 100644 index 0000000..f6a85a4 --- /dev/null +++ b/assets/js/ae1efb81.ef2d54c6.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[309],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>u});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=s(a),m=i,u=d["".concat(p,".").concat(m)]||d[m]||k[m]||r;return a?n.createElement(u,o(o({ref:t},c),{},{components:a})):n.createElement(u,o({ref:t},c))}));function u(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=a.length,o=new Array(r);o[0]=m;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[d]="string"==typeof e?e:i,o[1]=l;for(var s=2;s<r;s++)o[s]=a[s];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},2026:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>s});var n=a(7462),i=(a(7294),a(3905));const r={},o="Deploying FastKafka using 
Docker",l={unversionedId:"guides/Guide_30_Using_docker_to_deploy_fastkafka",id:"version-0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka",title:"Deploying FastKafka using Docker",description:"Building a Docker Image",source:"@site/versioned_docs/version-0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_30_Using_docker_to_deploy_fastkafka",permalink:"/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow"},next:{title:"Using FastAPI to Run FastKafka Application",permalink:"/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application"}},p={},s=[{value:"Building a Docker Image",id:"building-a-docker-image",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Creating requirements.txt file",id:"creating-requirementstxt-file",level:3},{value:"Creating Dockerfile",id:"creating-dockerfile",level:3},{value:"Build the Docker Image",id:"build-the-docker-image",level:3},{value:"Start the Docker Container",id:"start-the-docker-container",level:3},{value:"Additional Security",id:"additional-security",level:2},{value:"Example repo",id:"example-repo",level:2}],c={toc:s},d="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"deploying-fastkafka-using-docker"},"Deploying FastKafka using Docker"),(0,i.kt)("h2",{id:"building-a-docker-image"},"Building a Docker Image"),(0,i.kt)("p",null,"To build a Docker image for a FastKafka project, we need the following\nitems:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library that is built using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"A file in which the requirements are specified. 
This could be a\nrequirements.txt file, a setup.py file, or even a wheel file."),(0,i.kt)("li",{parentName:"ol"},"A Dockerfile to build an image that will include the two files\nmentioned above.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka 
broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h3",{id:"creating-requirementstxt-file"},"Creating requirements.txt file"),(0,i.kt)("p",null,"The above code only requires ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka"),". So, we will add only\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka")," to the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file, but you can add additional\nrequirements to it as well."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"fastkafka>=0.3.0\n")),(0,i.kt)("p",null,"Here we are using ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," to store the project\u2019s\ndependencies. However, other methods like ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv"),", and\n",(0,i.kt)("inlineCode",{parentName:"p"},"wheel")," files can also be used. ",(0,i.kt)("inlineCode",{parentName:"p"},"setup.py")," is commonly used for\npackaging and distributing Python modules, while ",(0,i.kt)("inlineCode",{parentName:"p"},"pipenv")," is a tool used\nfor managing virtual environments and package dependencies. 
",(0,i.kt)("inlineCode",{parentName:"p"},"wheel"),"\nfiles are built distributions of Python packages that can be installed\nwith pip."),(0,i.kt)("h3",{id:"creating-dockerfile"},"Creating Dockerfile"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-dockerfile"},'# (1)\nFROM python:3.9-slim-bullseye\n# (2)\nWORKDIR /project\n# (3)\nCOPY application.py requirements.txt /project/\n# (4)\nRUN pip install --no-cache-dir --upgrade -r /project/requirements.txt\n# (5)\nCMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]\n')),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Start from the official Python base image.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the current working directory to ",(0,i.kt)("inlineCode",{parentName:"p"},"/project"),"."),(0,i.kt)("p",{parentName:"li"},"This is where we\u2019ll put the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file and the\n",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Copy the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file inside\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install the package dependencies in the requirements file."),(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"--no-cache-dir")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to not save the downloaded\npackages locally, as that is only if ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," was going to be run again\nto install the same packages, but that\u2019s not the case when working\nwith containers."),(0,i.kt)("p",{parentName:"li"},"The 
",(0,i.kt)("inlineCode",{parentName:"p"},"--upgrade")," option tells ",(0,i.kt)("inlineCode",{parentName:"p"},"pip")," to upgrade the packages if they\nare already installed.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Set the ",(0,i.kt)("strong",{parentName:"p"},"command")," to run the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"p"},"CMD")," takes a list of strings, each of these strings is what you\nwould type in the command line separated by spaces."),(0,i.kt)("p",{parentName:"li"},"This command will be run from the ",(0,i.kt)("strong",{parentName:"p"},"current working directory"),", the\nsame ",(0,i.kt)("inlineCode",{parentName:"p"},"/project")," directory you set above with ",(0,i.kt)("inlineCode",{parentName:"p"},"WORKDIR /project"),"."),(0,i.kt)("p",{parentName:"li"},"We supply additional parameters ",(0,i.kt)("inlineCode",{parentName:"p"},"--num-workers")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"--kafka-broker"),"\nfor the run command. 
Finally, we specify the location of our\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka")," application location as a command argument."),(0,i.kt)("p",{parentName:"li"},"To learn more about ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command please check the ",(0,i.kt)("a",{parentName:"p",href:"../../cli/fastkafka/#fastkafka-run"},"CLI\ndocs"),"."))),(0,i.kt)("h3",{id:"build-the-docker-image"},"Build the Docker Image"),(0,i.kt)("p",null,"Now that all the files are in place, let\u2019s build the container image."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Go to the project directory (where your ",(0,i.kt)("inlineCode",{parentName:"p"},"Dockerfile")," is, containing\nyour ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file).")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to build the image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker build -t fastkafka_project_image .\n")),(0,i.kt)("p",{parentName:"li"},"This command will create a docker image with the name\n",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and the ",(0,i.kt)("inlineCode",{parentName:"p"},"latest")," tag."))),(0,i.kt)("p",null,"That\u2019s it! You have now built a docker image for your FastKafka project."),(0,i.kt)("h3",{id:"start-the-docker-container"},"Start the Docker Container"),(0,i.kt)("p",null,"Run a container based on the built image:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"docker run -d --name fastkafka_project_container fastkafka_project_image\n")),(0,i.kt)("h2",{id:"additional-security"},"Additional Security"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Trivy")," is an open-source tool that scans Docker images for\nvulnerabilities. 
It can be integrated into your CI/CD pipeline to ensure\nthat your images are secure and free from known vulnerabilities. Here\u2019s\nhow you can use ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image"),":"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Install ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," on your local machine by following the instructions\nprovided in the ",(0,i.kt)("a",{parentName:"p",href:"https://aquasecurity.github.io/trivy/latest/getting-started/installation/"},"official ",(0,i.kt)("inlineCode",{parentName:"a"},"trivy"),"\ndocumentation"),".")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Run the following command to scan your fastkafka_project_image:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"trivy image fastkafka_project_image\n")),(0,i.kt)("p",{parentName:"li"},"This command will scan your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," for any\nvulnerabilities and provide you with a report of its findings.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Fix any vulnerabilities identified by ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy"),". 
You can do this by\nupdating the vulnerable package to a more secure version or by using\na different package altogether.")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Rebuild your ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka_project_image")," and repeat steps 2 and 3\nuntil ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," reports no vulnerabilities."))),(0,i.kt)("p",null,"By using ",(0,i.kt)("inlineCode",{parentName:"p"},"trivy")," to scan your Docker images, you can ensure that your\ncontainers are secure and free from known vulnerabilities."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nbased library which uses above mentioned Dockerfile to build a docker\nimage can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_project/"},"here")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/b1b6a961.74208c3e.js b/assets/js/b1b6a961.74208c3e.js new file mode 100644 index 0000000..c258496 --- /dev/null +++ b/assets/js/b1b6a961.74208c3e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4168],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>f});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function i(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function r(e){for(var a=1;a<arguments.length;a++){var 
n=null!=arguments[a]?arguments[a]:{};a%2?i(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):i(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function s(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var n,t,o={},i=Object.keys(e);for(t=0;t<i.length;t++)n=i[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t<i.length;t++)n=i[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=t.createContext({}),l=function(e){var a=t.useContext(p),n=a;return e&&(n="function"==typeof e?e(a):r(r({},a),e)),n},c=function(e){var a=l(e.components);return t.createElement(p.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},m=t.forwardRef((function(e,a){var n=e.components,o=e.mdxType,i=e.originalType,p=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=l(n),m=o,f=d["".concat(p,".").concat(m)]||d[m]||u[m]||i;return n?t.createElement(f,r(r({ref:a},c),{},{components:n})):t.createElement(f,r({ref:a},c))}));function f(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var i=n.length,r=new Array(i);r[0]=m;var s={};for(var p in a)hasOwnProperty.call(a,p)&&(s[p]=a[p]);s.originalType=e,s[d]="string"==typeof e?e:o,r[1]=s;for(var l=2;l<i;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}m.displayName="MDXCreateElement"},7251:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>u,frontMatter:()=>i,metadata:()=>s,toc:()=>l});var t=n(7462),o=(n(7294),n(3905));const i={},r="Lifespan Events",s={unversionedId:"guides/Guide_05_Lifespan_Handler",id:"version-0.5.0/guides/Guide_05_Lifespan_Handler",title:"Lifespan Events",description:"Did 
you know that you can define some special code that runs before and",source:"@site/versioned_docs/version-0.5.0/guides/Guide_05_Lifespan_Handler.md",sourceDirName:"guides",slug:"/guides/Guide_05_Lifespan_Handler",permalink:"/docs/0.5.0/guides/Guide_05_Lifespan_Handler",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Defining a partition key",permalink:"/docs/0.5.0/guides/Guide_22_Partition_Keys"},next:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"}},p={},l=[{value:"Lifespan example - Iris prediction model",id:"lifespan-example---iris-prediction-model",level:2},{value:"Lifespan",id:"lifespan",level:3},{value:"Async context manager",id:"async-context-manager",level:3},{value:"App demo",id:"app-demo",level:2},{value:"FastKafka app",id:"fastkafka-app",level:3},{value:"Data modeling",id:"data-modeling",level:3},{value:"Consumers and producers",id:"consumers-and-producers",level:3},{value:"Final app",id:"final-app",level:3},{value:"Running the app",id:"running-the-app",level:3},{value:"Recap",id:"recap",level:2}],c={toc:l},d="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(d,(0,t.Z)({},c,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"lifespan-events"},"Lifespan Events"),(0,o.kt)("p",null,"Did you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! \ud83d\ude80"),(0,o.kt)("p",null,"Lets break it down:"),(0,o.kt)("p",null,"You can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages."),(0,o.kt)("p",null,"Similarly, you can define logic (code) that should be executed when the\napplication is shutting down. 
This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up."),(0,o.kt)("p",null,"By executing code before consuming and after producing, you cover the\nentire lifecycle of your application \ud83c\udf89"),(0,o.kt)("p",null,"This is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!"),(0,o.kt)("p",null,"So lets give it a try and see how it can make your Kafka app even more\nawesome! \ud83d\udcaa"),(0,o.kt)("h2",{id:"lifespan-example---iris-prediction-model"},"Lifespan example - Iris prediction model"),(0,o.kt)("p",null,"Let\u2019s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. These models are shared among consumers\nand producers, which means you don\u2019t want to load them for every\nmessage."),(0,o.kt)("p",null,"Here\u2019s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. 
In the upcoming sections, we\u2019ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication."),(0,o.kt)("h3",{id:"lifespan"},"Lifespan"),(0,o.kt)("p",null,"You can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager."),(0,o.kt)("p",null,"Let\u2019s start with an example and then see it in detail."),(0,o.kt)("p",null,"We create an async function lifespan() with yield like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \n')),(0,o.kt)("p",null,"The first thing to notice, is that we are defining an async function\nwith ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),". This is very similar to Dependencies with ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,o.kt)("p",null,"The first part of the function, before the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),", will be executed\n",(0,o.kt)("strong",{parentName:"p"},"before")," the application starts. 
And the part after the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield")," will\nbe executed ",(0,o.kt)("strong",{parentName:"p"},"after")," the application has finished."),(0,o.kt)("p",null,"This lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown."),(0,o.kt)("p",null,"The lifespan will be passed an KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup."),(0,o.kt)("p",null,"For demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called."),(0,o.kt)("h3",{id:"async-context-manager"},"Async context manager"),(0,o.kt)("p",null,"Context managers can be used in ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," blocks, our lifespan, for example\ncould be used like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"ml_models = {}\nasync with lifespan(None):\n print(ml_models)\n")),(0,o.kt)("p",null,"When you create a context manager or an async context manager, what it\ndoes is that, before entering the ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute the code\nbefore the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),", and after exiting the ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute\nthe code after the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,o.kt)("p",null,"If you want to learn more about context managers and contextlib\ndecorators, please visit ",(0,o.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/contextlib.html"},"Python official\ndocs")),(0,o.kt)("h2",{id:"app-demo"},"App demo"),(0,o.kt)("h3",{id:"fastkafka-app"},"FastKafka app"),(0,o.kt)("p",null,"Lets now create our application using the created lifespan handler."),(0,o.kt)("p",null,"Notice how we passed our lifespan handler to the app when constructing\nit trough the 
",(0,o.kt)("inlineCode",{parentName:"p"},"lifespan")," argument."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local development kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n')),(0,o.kt)("h3",{id:"data-modeling"},"Data modeling"),(0,o.kt)("p",null,"Lets model the Iris data for our app:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"consumers-and-producers"},"Consumers and producers"),(0,o.kt)("p",null,"Lets create a consumer and producer for our app that will generate\npredictions from input iris data."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", 
"versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h3",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"The final app looks like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \nkafka_brokers = {\n "localhost": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local development kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, 
msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h3",{id:"running-the-app"},"Running the app"),(0,o.kt)("p",null,"Now we can run the app with your custom lifespan handler. Copy the code\nabove in lifespan_example.py and run it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n")),(0,o.kt)("p",null,"When you run the app, you should see a simmilar output to the one below:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[262292]: Loading the model!\n[262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer started.\n[262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished\n[262292]: Exiting, clearing model dict!\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.\n")),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have defined a lifespan handler and passed to our\nFastKafka app."),(0,o.kt)("p",null,"Some important points are:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Lifespan handler is implemented 
as\n",(0,o.kt)("a",{parentName:"li",href:"https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager"},"AsyncContextManager")),(0,o.kt)("li",{parentName:"ol"},"Code ",(0,o.kt)("strong",{parentName:"li"},"before")," yield in lifespan will be executed ",(0,o.kt)("strong",{parentName:"li"},"before"),"\napplication ",(0,o.kt)("strong",{parentName:"li"},"startup")),(0,o.kt)("li",{parentName:"ol"},"Code ",(0,o.kt)("strong",{parentName:"li"},"after")," yield in lifespan will be executed ",(0,o.kt)("strong",{parentName:"li"},"after"),"\napplication ",(0,o.kt)("strong",{parentName:"li"},"shutdown")),(0,o.kt)("li",{parentName:"ol"},"You can pass your lifespan handler to FastKafka app on\ninitialisation by passing a ",(0,o.kt)("inlineCode",{parentName:"li"},"lifespan")," argument")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/b24805c2.5ca38736.js b/assets/js/b24805c2.5ca38736.js new file mode 100644 index 0000000..4a953c1 --- /dev/null +++ b/assets/js/b24805c2.5ca38736.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1616],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>c});var i=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function n(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,i)}return a}function s(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?n(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):n(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,i,r=function(e,t){if(null==e)return{};var 
a,i,r={},n=Object.keys(e);for(i=0;i<n.length;i++)a=n[i],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);for(i=0;i<n.length;i++)a=n[i],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=i.createContext({}),k=function(e){var t=i.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},u=function(e){var t=k(e.components);return i.createElement(p.Provider,{value:t},e.children)},o="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},h=i.forwardRef((function(e,t){var a=e.components,r=e.mdxType,n=e.originalType,p=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),o=k(a),h=r,c=o["".concat(p,".").concat(h)]||o[h]||m[h]||n;return a?i.createElement(c,s(s({ref:t},u),{},{components:a})):i.createElement(c,s({ref:t},u))}));function c(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var n=a.length,s=new Array(n);s[0]=h;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[o]="string"==typeof e?e:r,s[1]=l;for(var k=2;k<n;k++)s[k]=a[k];return i.createElement.apply(null,s)}return i.createElement.apply(null,a)}h.displayName="MDXCreateElement"},5504:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>s,default:()=>m,frontMatter:()=>n,metadata:()=>l,toc:()=>k});var i=a(7462),r=(a(7294),a(3905));const n={},s="Release notes",l={unversionedId:"CHANGELOG",id:"version-0.5.0/CHANGELOG",title:"Release notes",description:"0.5.0",source:"@site/versioned_docs/version-0.5.0/CHANGELOG.md",sourceDirName:".",slug:"/CHANGELOG",permalink:"/docs/0.5.0/CHANGELOG",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"run_fastkafka_server_process",permalink:"/docs/0.5.0/cli/run_fastkafka_server_process"}},p={},k=[{value:"0.5.0",id:"050",level:2},{value:"New Features",id:"new-features",level:3},{value:"Bugs 
Squashed",id:"bugs-squashed",level:3},{value:"0.4.0",id:"040",level:2},{value:"New Features",id:"new-features-1",level:3},{value:"0.3.1",id:"031",level:2},{value:"0.3.0",id:"030",level:2},{value:"New Features",id:"new-features-2",level:3},{value:"Bugs Squashed",id:"bugs-squashed-1",level:3},{value:"0.2.3",id:"023",level:2},{value:"0.2.2",id:"022",level:2},{value:"New Features",id:"new-features-3",level:3},{value:"Bugs Squashed",id:"bugs-squashed-2",level:3},{value:"0.2.0",id:"020",level:2},{value:"New Features",id:"new-features-4",level:3},{value:"Bugs Squashed",id:"bugs-squashed-3",level:3},{value:"0.1.3",id:"013",level:2},{value:"0.1.2",id:"012",level:2},{value:"New Features",id:"new-features-5",level:3},{value:"Bugs Squashed",id:"bugs-squashed-4",level:3},{value:"0.1.1",id:"011",level:2},{value:"Bugs Squashed",id:"bugs-squashed-5",level:3},{value:"0.1.0",id:"010",level:2}],u={toc:k},o="wrapper";function m(e){let{components:t,...a}=e;return(0,r.kt)(o,(0,i.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"release-notes"},"Release notes"),(0,r.kt)("h2",{id:"050"},"0.5.0"),(0,r.kt)("h3",{id:"new-features"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Significant speedup of Kafka producer (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/236"},"#236"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Added support for AVRO encoding/decoding (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/231"},"#231"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("h3",{id:"bugs-squashed"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed sidebar to include guides in docusaurus documentation 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/238"},"#238"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed link to symbols in docusaurus docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/227"},"#227"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed bootstrap servers from constructor (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/220"},"#220"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")))),(0,r.kt)("h2",{id:"040"},"0.4.0"),(0,r.kt)("h3",{id:"new-features-1"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Integrate fastkafka chat (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/208"},"#208"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add benchmarking (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/206"},"#206"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Enable fast testing without running kafka locally (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/198"},"#198"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate docs using Docusaurus (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/194"},"#194"),"), thanks to 
",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add test cases for LocalRedpandaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/189"},"#189"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Reimplement patch and delegates from fastcore (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/188"},"#188"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Rename existing functions into start and stop and add lifespan handler (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/117"},"#117"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"},"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"))))),(0,r.kt)("h2",{id:"031"},"0.3.1"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"README.md file updated")),(0,r.kt)("h2",{id:"030"},"0.3.0"),(0,r.kt)("h3",{id:"new-features-2"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Guide for fastkafka produces using partition key (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/172"},"#172"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #161"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add support for Redpanda for testing and deployment 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/181"},"#181"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Remove bootstrap_servers from ",(0,r.kt)("strong",{parentName:"p"},"init")," and use the name of broker as an option when running/testing (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/134"},"#134"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add a GH action file to check for broken links in the docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/163"},"#163"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optimize requirements for testing and docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/151"},"#151"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Break requirements into base and optional for testing and dev (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/124"},"#124"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Minimize base requirements needed just for running the service."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add link to example git repo into guide for building docs using actions (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/81"},"#81"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add logging for run_in_background (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/46"},"#46"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement partition Key mechanism for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/16"},"#16"),")"))),(0,r.kt)("h3",{id:"bugs-squashed-1"},"Bugs 
Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement checks for npm installation and version (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/176"},"#176"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #158 by checking if the npx is installed and more verbose error handling"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix the helper.py link in CHANGELOG.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/165"},"#165"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka docs install_deps fails (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/157"},"#157"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Unexpected internal error: ","[Errno 2]"," No such file or directory: 'npx'"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Broken links in docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/141"},"#141"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka run is not showing up in CLI docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/132"},"#132"),")"))),(0,r.kt)("h2",{id:"023"},"0.2.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fixed broken links on PyPi index page")),(0,r.kt)("h2",{id:"022"},"0.2.2"),(0,r.kt)("h3",{id:"new-features-3"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Extract JDK and Kafka installation out of LocalKafkaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/131"},"#131"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"PyYAML version relaxed 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/119"},"#119"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Replace docker based kafka with local (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/68"},"#68"),")"),(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace docker compose with a simple docker run (standard run_jupyter.sh should do)"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace all tests to use LocalKafkaBroker"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","update documentation")))),(0,r.kt)("h3",{id:"bugs-squashed-2"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken link for FastKafka docs in index notebook (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/145"},"#145"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix encoding issues when loading setup.py on windows OS (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/135"},"#135"),")"))),(0,r.kt)("h2",{id:"020"},"0.2.0"),(0,r.kt)("h3",{id:"new-features-4"},"New Features"),(0,r.kt)("ul",{className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul"},"Replace kafka container with LocalKafkaBroker 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/112"},"#112"),")",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Replace kafka container with LocalKafkaBroker in tests"))))),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Remove kafka container from tests environment"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Fix failing tests")),(0,r.kt)("h3",{id:"bugs-squashed-3"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fix random failing in CI (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/109"},"#109"),")")),(0,r.kt)("h2",{id:"013"},"0.1.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"version update in ",(0,r.kt)("strong",{parentName:"li"},"init"),".py")),(0,r.kt)("h2",{id:"012"},"0.1.2"),(0,r.kt)("h3",{id:"new-features-5"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Git workflow action for publishing Kafka docs (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/78"},"#78"),")")),(0,r.kt)("h3",{id:"bugs-squashed-4"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Include missing requirement (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/110"},"#110"),")",(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Typer is imported in this 
",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py"},"file")," but it is not included in ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/settings.ini"},"settings.ini")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add aiohttp which is imported in this ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py"},"file")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbformat which is imported in _components/helpers.py"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbconvert which is imported in _components/helpers.py")))),(0,r.kt)("h2",{id:"011"},"0.1.1"),(0,r.kt)("h3",{id:"bugs-squashed-5"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"JDK install fails on Python 3.8 (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/106"},"#106"),")")),(0,r.kt)("h2",{id:"010"},"0.1.0"),(0,r.kt)("p",null,"Initial release"))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/b638c32b.ab27dab2.js b/assets/js/b638c32b.ab27dab2.js new file mode 100644 index 0000000..dac59d0 --- /dev/null +++ b/assets/js/b638c32b.ab27dab2.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4497],{3905:(e,t,n)=>{n.d(t,{Zo:()=>l,kt:()=>m});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?a(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function u(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),s=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},l=function(e){var t=s(e.components);return r.createElement(c.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,l=u(e,["components","mdxType","originalType","parentName"]),p=s(n),d=o,m=p["".concat(c,".").concat(d)]||p[d]||f[d]||a;return n?r.createElement(m,i(i({ref:t},l),{},{components:n})):r.createElement(m,i({ref:t},l))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var u={};for(var c in t)hasOwnProperty.call(t,c)&&(u[c]=t[c]);u.originalType=e,u[p]="string"==typeof e?e:o,i[1]=u;for(var s=2;s<a;s++)i[s]=n[s];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}d.displayName="MDXCreateElement"},4962:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>u,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const 
a={},i="Authentication",u={unversionedId:"guides/Guide_03_Authentication",id:"version-0.5.0/guides/Guide_03_Authentication",title:"Authentication",description:"TLS Authentication",source:"@site/versioned_docs/version-0.5.0/guides/Guide_03_Authentication.md",sourceDirName:"guides",slug:"/guides/Guide_03_Authentication",permalink:"/docs/0.5.0/guides/Guide_03_Authentication",draft:!1,tags:[],version:"0.5.0",frontMatter:{}},c={},s=[{value:"TLS Authentication",id:"tls-authentication",level:2}],l={toc:s},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},l,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"authentication"},"Authentication"),(0,o.kt)("h2",{id:"tls-authentication"},"TLS Authentication"),(0,o.kt)("p",null,"sasl_mechanism (str) \u2013 Authentication mechanism when security_protocol\nis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN"),(0,o.kt)("p",null,"sasl_plain_username (str) \u2013 username for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_plain_password (str) \u2013 password for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_oauth_token_provider (AbstractTokenProvider) \u2013 OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). 
Default: None"))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/b70bee8d.45dd4d58.js b/assets/js/b70bee8d.45dd4d58.js new file mode 100644 index 0000000..5dd4bcf --- /dev/null +++ b/assets/js/b70bee8d.45dd4d58.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[608],{3905:(t,e,a)=>{a.d(e,{Zo:()=>k,kt:()=>c});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function i(t){for(var e=1;e<arguments.length;e++){var a=null!=arguments[e]?arguments[e]:{};e%2?l(Object(a),!0).forEach((function(e){r(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):l(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function p(t,e){if(null==t)return{};var a,n,r=function(t,e){if(null==t)return{};var a,n,r={},l=Object.keys(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var d=n.createContext({}),o=function(t){var e=n.useContext(d),a=e;return t&&(a="function"==typeof t?t(e):i(i({},e),t)),a},k=function(t){var e=o(t.components);return n.createElement(d.Provider,{value:e},t.children)},m="mdxType",s={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},u=n.forwardRef((function(t,e){var 
a=t.components,r=t.mdxType,l=t.originalType,d=t.parentName,k=p(t,["components","mdxType","originalType","parentName"]),m=o(a),u=r,c=m["".concat(d,".").concat(u)]||m[u]||s[u]||l;return a?n.createElement(c,i(i({ref:e},k),{},{components:a})):n.createElement(c,i({ref:e},k))}));function c(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,i=new Array(l);i[0]=u;var p={};for(var d in e)hasOwnProperty.call(e,d)&&(p[d]=e[d]);p.originalType=t,p[m]="string"==typeof t?t:r,i[1]=p;for(var o=2;o<l;o++)i[o]=a[o];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},3428:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>d,contentTitle:()=>i,default:()=>s,frontMatter:()=>l,metadata:()=>p,toc:()=>o});var n=a(7462),r=(a(7294),a(3905));const l={},i=void 0,p={unversionedId:"api/fastkafka/EventMetadata",id:"api/fastkafka/EventMetadata",title:"EventMetadata",description:"fastkafka.EventMetadata {fastkafka.EventMetadata}",source:"@site/docs/api/fastkafka/EventMetadata.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/EventMetadata",permalink:"/docs/next/api/fastkafka/EventMetadata",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Benchmarking FastKafka app",permalink:"/docs/next/guides/Guide_06_Benchmarking_FastKafka"},next:{title:"FastKafka",permalink:"/docs/next/api/fastkafka/"}},d={},o=[{value:"fastkafka.EventMetadata",id:"fastkafka.EventMetadata",level:2},{value:"<strong>init</strong>",id:"fastkafka.EventMetadata.init",level:3},{value:"create_event_metadata",id:"fastkafka._components.aiokafka_consumer_loop.EventMetadata.create_event_metadata",level:3}],k={toc:o},m="wrapper";function 
s(t){let{components:e,...a}=t;return(0,r.kt)(m,(0,n.Z)({},k,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.EventMetadata"},"fastkafka.EventMetadata"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/aiokafka_consumer_loop.py#L27-L77",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"A class for encapsulating Kafka record metadata."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The topic this record is received from"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partition")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The partition from which this record is received"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"offset")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The position of this record in the corresponding Kafka 
partition"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"timestamp")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The timestamp of this record"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"timestamp_type")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The timestamp type of this record"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[bytes]")),(0,r.kt)("td",{parentName:"tr",align:null},"The key (or ",(0,r.kt)("inlineCode",{parentName:"td"},"None")," if no key is specified)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[bytes]")),(0,r.kt)("td",{parentName:"tr",align:null},"The value"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"serialized_key_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The size 
of the serialized, uncompressed key in bytes"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"serialized_value_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"The size of the serialized, uncompressed value in bytes"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"headers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Sequence[Tuple[str, bytes]]")),(0,r.kt)("td",{parentName:"tr",align:null},"The headers"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("h3",{id:"fastkafka.EventMetadata.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self,\n topic,\n partition,\n offset,\n timestamp,\n timestamp_type,\n key,\n value,\n checksum,\n serialized_key_size,\n serialized_value_size,\n headers,\n)\n")),(0,r.kt)("h3",{id:"fastkafka._components.aiokafka_consumer_loop.EventMetadata.create_event_metadata"},"create_event_metadata"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/aiokafka_consumer_loop.py#L56-L77",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"@staticmethod\ncreate_event_metadata(\n record\n)\n")),(0,r.kt)("p",null,"Creates an instance of EventMetadata from a 
ConsumerRecord."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"record")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ConsumerRecord")),(0,r.kt)("td",{parentName:"tr",align:null},"The Kafka ConsumerRecord."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"EventMetadata")),(0,r.kt)("td",{parentName:"tr",align:null},"The created EventMetadata instance.")))))}s.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/b7f60777.7085564d.js b/assets/js/b7f60777.7085564d.js new file mode 100644 index 0000000..5e9a068 --- /dev/null +++ b/assets/js/b7f60777.7085564d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3679],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>m});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var 
n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var l=n.createContext({}),p=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},d=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,i=e.mdxType,o=e.originalType,l=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=p(a),k=i,m=c["".concat(l,".").concat(k)]||c[k]||u[k]||o;return a?n.createElement(m,r(r({ref:t},d),{},{components:a})):n.createElement(m,r({ref:t},d))}));function m(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=a.length,r=new Array(o);r[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:i,r[1]=s;for(var p=2;p<o;p++)r[p]=a[p];return n.createElement.apply(null,r)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},6066:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>u,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var 
n=a(7462),i=(a(7294),a(3905));const o={},r="Using Redpanda to test FastKafka",s={unversionedId:"guides/Guide_31_Using_redpanda_to_test_fastkafka",id:"version-0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka",title:"Using Redpanda to test FastKafka",description:"What is FastKafka?",source:"@site/versioned_docs/version-0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_31_Using_redpanda_to_test_fastkafka",permalink:"/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using multiple Kafka clusters",permalink:"/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters"},next:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow"}},l={},p=[{value:"What is FastKafka?",id:"what-is-fastkafka",level:2},{value:"What is Redpanda?",id:"what-is-redpanda",level:2},{value:"Example repo",id:"example-repo",level:2},{value:"The process",id:"the-process",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:2},{value:"2. Cloning and setting up the example repo",id:"2-cloning-and-setting-up-the-example-repo",level:2},{value:"Create a virtual environment",id:"create-a-virtual-environment",level:3},{value:"Install Python dependencies",id:"install-python-dependencies",level:3},{value:"3. Writing server code",id:"3-writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"4. Writing the test code",id:"4-writing-the-test-code",level:2},{value:"5. 
Running the tests",id:"5-running-the-tests",level:2},{value:"Recap",id:"recap",level:3}],d={toc:p},c="wrapper";function u(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-redpanda-to-test-fastkafka"},"Using Redpanda to test FastKafka"),(0,i.kt)("h2",{id:"what-is-fastkafka"},"What is FastKafka?"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,i.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,i.kt)("h2",{id:"what-is-redpanda"},"What is Redpanda?"),(0,i.kt)("p",null,"Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda."),(0,i.kt)("p",null,"From ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"redpanda.com"),":"),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"Redpanda is a Kafka\xae-compatible streaming data platform that is proven\nto be 10x faster and 6x lower in total costs. 
It is also JVM-free,\nZooKeeper\xae-free, Jepsen-tested and source available.")),(0,i.kt)("p",null,"Some of the advantages of Redpanda over Kafka are"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A single binary with built-in everything, no ZooKeeper\xae or JVM\nneeded."),(0,i.kt)("li",{parentName:"ol"},"Costs upto 6X less than Kafka."),(0,i.kt)("li",{parentName:"ol"},"Up to 10x lower average latencies and up to 6x faster Kafka\ntransactions without compromising correctness.")),(0,i.kt)("p",null,"To learn more about Redpanda, please visit their\n",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"website")," or checkout this ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark"},"blog\npost"),"\ncomparing Redpanda and Kafka\u2019s performance benchmarks."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A sample fastkafka-based library that uses Redpanda for testing, based\non this guide, can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"here"),"."),(0,i.kt)("h2",{id:"the-process"},"The process"),(0,i.kt)("p",null,"Here are the steps we\u2019ll be walking through to build our example:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Set up the prerequisites."),(0,i.kt)("li",{parentName:"ol"},"Clone the example repo."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write an application using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write a test case to test FastKafka with Redpanda."),(0,i.kt)("li",{parentName:"ol"},"Run the test case and produce/consume messages.")),(0,i.kt)("h2",{id:"1-prerequisites"},"1. Prerequisites"),(0,i.kt)("p",null,"Before starting, make sure you have the following prerequisites set up:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Python 3.x"),": A Python 3.x installation is required to run\nFastKafka. 
You can download the latest version of Python from the\n",(0,i.kt)("a",{parentName:"li",href:"https://www.python.org/downloads/"},"official website"),". You\u2019ll also\nneed to have pip installed and updated, which is Python\u2019s package\ninstaller."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Docker Desktop"),": Docker is used to run Redpanda, which is\nrequired for testing FastKafka. You can download and install Docker\nDesktop from the ",(0,i.kt)("a",{parentName:"li",href:"https://www.docker.com/products/docker-desktop/"},"official\nwebsite"),"."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Git"),": You\u2019ll need to have Git installed to clone the example\nrepo. You can download Git from the ",(0,i.kt)("a",{parentName:"li",href:"https://git-scm.com/downloads"},"official\nwebsite"),".")),(0,i.kt)("h2",{id:"2-cloning-and-setting-up-the-example-repo"},"2. Cloning and setting up the example repo"),(0,i.kt)("p",null,"To get started with the example code, clone the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"GitHub\nrepository")," by\nrunning the following command in your terminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n")),(0,i.kt)("p",null,"This will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files."),(0,i.kt)("h3",{id:"create-a-virtual-environment"},"Create a virtual environment"),(0,i.kt)("p",null,"Before writing any code, let\u2019s ",(0,i.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#module-venv"},"create a new virtual\nenvironment"),"\nfor our project."),(0,i.kt)("p",null,"A virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between 
different projects."),(0,i.kt)("p",null,"To create a new virtual environment, run the following commands in your\nterminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"python3 -m venv venv\n")),(0,i.kt)("p",null,"This will create a new directory called ",(0,i.kt)("inlineCode",{parentName:"p"},"venv")," in your project\ndirectory, which will contain the virtual environment."),(0,i.kt)("p",null,"To activate the virtual environment, run the following command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"source venv/bin/activate\n")),(0,i.kt)("p",null,"This will change your shell\u2019s prompt to indicate that you are now\nworking inside the virtual environment."),(0,i.kt)("p",null,"Finally, run the following command to upgrade ",(0,i.kt)("inlineCode",{parentName:"p"},"pip"),", the Python package\ninstaller:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install --upgrade pip\n")),(0,i.kt)("h3",{id:"install-python-dependencies"},"Install Python dependencies"),(0,i.kt)("p",null,"Next, let\u2019s install the required Python dependencies. In this guide,\nwe\u2019ll be using ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," to write our application code and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest"),"\nand ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest-asyncio")," to test it."),(0,i.kt)("p",null,"You can install the dependencies from the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file\nprovided in the cloned repository by running:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install -r requirements.txt\n")),(0,i.kt)("p",null,"This will install all the required packages and their dependencies."),(0,i.kt)("h2",{id:"3-writing-server-code"},"3. 
Writing server code"),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:"),(0,i.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,i.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model."),(0,i.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,i.kt)("h3",{id:"messages"},"Messages"),(0,i.kt)("p",null,"FastKafka uses ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,i.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,i.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat"},(0,i.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,i.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,i.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("h3",{id:"application"},"Application"),(0,i.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,i.kt)("p",null,"It starts by defining a dictionary called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,i.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,i.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker."),(0,i.kt)("p",null,"Next, an instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," class is initialized with the\nminimum required arguments:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generating documentation")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,i.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,i.kt)("p",null,"FastKafka provides convenient function decorators ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,i.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,i.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,i.kt)("p",null,"This following example shows how to use the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,i.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,i.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h2",{id:"4-writing-the-test-code"},"4. Writing the test code"),(0,i.kt)("p",null,"The service can be tested using the ",(0,i.kt)("inlineCode",{parentName:"p"},"Tester")," instance which can be\nconfigured to start a ",(0,i.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/LocalRedpandaBroker/"},"Redpanda\nbroker")," for testing\npurposes. 
The ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file in the cloned repository contains the\nfollowing code for testing."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n # Start Tester app and create local Redpanda broker for testing\n async with Tester(kafka_app).using_local_redpanda(\n tag="v23.1.2", listener_port=9092\n ) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"Tester")," module utilizes uses ",(0,i.kt)("inlineCode",{parentName:"p"},"LocalRedpandaBroker")," to start and\nstop a Redpanda broker for testing purposes using Docker"),(0,i.kt)("h2",{id:"5-running-the-tests"},"5. Running the tests"),(0,i.kt)("p",null,"We can run the tests which is in ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file by executing the\nfollowing command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pytest test.py\n")),(0,i.kt)("p",null,"This will start a Redpanda broker using Docker and executes tests. 
The\noutput of the command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item \n\ntest.py . [100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n")),(0,i.kt)("p",null,"Running the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable."),(0,i.kt)("h3",{id:"recap"},"Recap"),(0,i.kt)("p",null,"We have created an Iris classification model and encapulated it into our\n",(0,i.kt)("inlineCode",{parentName:"p"},"FastKafka")," application. 
The app will consume the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," from\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,i.kt)("inlineCode",{parentName:"p"},"predictions"),"\ntopic."),(0,i.kt)("p",null,"To test the app we have:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Created the app")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Started our ",(0,i.kt)("inlineCode",{parentName:"p"},"Tester")," class with ",(0,i.kt)("inlineCode",{parentName:"p"},"Redpanda")," broker which mirrors the\ndeveloped app topics for testing purposes")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Sent ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message to ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message"))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/b91921d6.198b04b6.js b/assets/js/b91921d6.198b04b6.js new file mode 100644 index 0000000..5b8eb55 --- /dev/null +++ b/assets/js/b91921d6.198b04b6.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8270],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function f(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),l=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},c=function(e){var t=l(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,c=f(e,["components","mdxType","originalType","parentName"]),p=l(a),u=r,d=p["".concat(s,".").concat(u)]||p[u]||k[u]||o;return a?n.createElement(d,i(i({ref:t},c),{},{components:a})):n.createElement(d,i({ref:t},c))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=u;var f={};for(var s in t)hasOwnProperty.call(t,s)&&(f[s]=t[s]);f.originalType=e,f[p]="string"==typeof e?e:r,i[1]=f;for(var l=2;l<o;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},3960:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>k,frontMatter:()=>o,metadata:()=>f,toc:()=>l});var n=a(7462),r=(a(7294),a(3905));const o={},i=void 0,f={unversionedId:"api/fastkafka/KafkaEvent",id:"version-0.7.0/api/fastkafka/KafkaEvent",title:"KafkaEvent",description:"fastkafka.KafkaEvent 
{fastkafka.KafkaEvent}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/KafkaEvent.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/KafkaEvent",permalink:"/docs/0.7.0/api/fastkafka/KafkaEvent",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/0.7.0/api/fastkafka/"},next:{title:"AvroBase",permalink:"/docs/0.7.0/api/fastkafka/encoder/AvroBase"}},s={},l=[{value:"<code>fastkafka.KafkaEvent</code>",id:"fastkafka.KafkaEvent",level:2}],c={toc:l},p="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.KafkaEvent")),(0,r.kt)("p",null,"A generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"message"),": The message contained in the Kafka event, can be of type pydantic.BaseModel."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"key"),": The optional key used to identify the Kafka event.")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/b9d0db8e.9fbaa8a9.js b/assets/js/b9d0db8e.9fbaa8a9.js new file mode 100644 index 0000000..96a3b38 --- /dev/null +++ b/assets/js/b9d0db8e.9fbaa8a9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9170],{3905:(e,t,r)=>{r.d(t,{Zo:()=>d,kt:()=>k});var a=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,a)}return r}function 
l(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?o(Object(r),!0).forEach((function(t){n(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):o(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function c(e,t){if(null==e)return{};var r,a,n=function(e,t){if(null==e)return{};var r,a,n={},o=Object.keys(e);for(a=0;a<o.length;a++)r=o[a],t.indexOf(r)>=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)r=o[a],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var i=a.createContext({}),p=function(e){var t=a.useContext(i),r=t;return e&&(r="function"==typeof e?e(t):l(l({},t),e)),r},d=function(e){var t=p(e.components);return a.createElement(i.Provider,{value:t},e.children)},s="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var r=e.components,n=e.mdxType,o=e.originalType,i=e.parentName,d=c(e,["components","mdxType","originalType","parentName"]),s=p(r),u=n,k=s["".concat(i,".").concat(u)]||s[u]||f[u]||o;return r?a.createElement(k,l(l({ref:t},d),{},{components:r})):a.createElement(k,l({ref:t},d))}));function k(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var o=r.length,l=new Array(o);l[0]=u;var c={};for(var i in t)hasOwnProperty.call(t,i)&&(c[i]=t[i]);c.originalType=e,c[s]="string"==typeof e?e:n,l[1]=c;for(var p=2;p<o;p++)l[p]=r[p];return a.createElement.apply(null,l)}return a.createElement.apply(null,r)}u.displayName="MDXCreateElement"},9743:(e,t,r)=>{r.r(t),r.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>f,frontMatter:()=>o,metadata:()=>c,toc:()=>p});var a=r(7462),n=(r(7294),r(3905));const o={},l=void 
0,c={unversionedId:"api/fastkafka/encoder/avro_encoder",id:"version-0.8.0/api/fastkafka/encoder/avro_encoder",title:"avro_encoder",description:"avroencoder {fastkafka.encoder.avroencoder}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/avro_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_encoder",permalink:"/docs/api/fastkafka/encoder/avro_encoder",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_decoder",permalink:"/docs/api/fastkafka/encoder/avro_decoder"},next:{title:"avsc_to_pydantic",permalink:"/docs/api/fastkafka/encoder/avsc_to_pydantic"}},i={},p=[{value:"avro_encoder",id:"fastkafka.encoder.avro_encoder",level:3}],d={toc:p},s="wrapper";function f(e){let{components:t,...r}=e;return(0,n.kt)(s,(0,a.Z)({},d,r,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("h3",{id:"fastkafka.encoder.avro_encoder"},"avro_encoder"),(0,n.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L239-L259",class:"link-to-source",target:"_blank"},"View source"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-py"},"avro_encoder(\n msg\n)\n")),(0,n.kt)("p",null,"Encoder to encode pydantic instances to avro message"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Name"),(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"),(0,n.kt)("th",{parentName:"tr",align:null},"Default"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"msg")),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"BaseModel")),(0,n.kt)("td",{parentName:"tr",align:null},"An instance of pydantic 
basemodel"),(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("em",{parentName:"td"},"required"))))),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("table",null,(0,n.kt)("thead",{parentName:"table"},(0,n.kt)("tr",{parentName:"thead"},(0,n.kt)("th",{parentName:"tr",align:null},"Type"),(0,n.kt)("th",{parentName:"tr",align:null},"Description"))),(0,n.kt)("tbody",{parentName:"table"},(0,n.kt)("tr",{parentName:"tbody"},(0,n.kt)("td",{parentName:"tr",align:null},(0,n.kt)("inlineCode",{parentName:"td"},"bytes")),(0,n.kt)("td",{parentName:"tr",align:null},"A bytes message which is encoded from pydantic basemodel")))))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/ba3b9f5c.009751df.js b/assets/js/ba3b9f5c.009751df.js new file mode 100644 index 0000000..3b1f6f9 --- /dev/null +++ b/assets/js/ba3b9f5c.009751df.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4358],{3905:(e,r,t)=>{t.d(r,{Zo:()=>s,kt:()=>k});var n=t(7294);function a(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function o(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);r&&(n=n.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,n)}return t}function c(e){for(var r=1;r<arguments.length;r++){var t=null!=arguments[r]?arguments[r]:{};r%2?o(Object(t),!0).forEach((function(r){a(e,r,t[r])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):o(Object(t)).forEach((function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r))}))}return e}function i(e,r){if(null==e)return{};var t,n,a=function(e,r){if(null==e)return{};var t,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)t=o[n],r.indexOf(t)>=0||(a[t]=e[t]);return a}(e,r);if(Object.getOwnPropertySymbols){var 
o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)t=o[n],r.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var d=n.createContext({}),l=function(e){var r=n.useContext(d),t=r;return e&&(t="function"==typeof e?e(r):c(c({},r),e)),t},s=function(e){var r=l(e.components);return n.createElement(d.Provider,{value:r},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var r=e.children;return n.createElement(n.Fragment,{},r)}},u=n.forwardRef((function(e,r){var t=e.components,a=e.mdxType,o=e.originalType,d=e.parentName,s=i(e,["components","mdxType","originalType","parentName"]),p=l(t),u=a,k=p["".concat(d,".").concat(u)]||p[u]||f[u]||o;return t?n.createElement(k,c(c({ref:r},s),{},{components:t})):n.createElement(k,c({ref:r},s))}));function k(e,r){var t=arguments,a=r&&r.mdxType;if("string"==typeof e||a){var o=t.length,c=new Array(o);c[0]=u;var i={};for(var d in r)hasOwnProperty.call(r,d)&&(i[d]=r[d]);i.originalType=e,i[p]="string"==typeof e?e:a,c[1]=i;for(var l=2;l<o;l++)c[l]=t[l];return n.createElement.apply(null,c)}return n.createElement.apply(null,t)}u.displayName="MDXCreateElement"},9462:(e,r,t)=>{t.r(r),t.d(r,{assets:()=>d,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>i,toc:()=>l});var n=t(7462),a=(t(7294),t(3905));const o={},c=void 0,i={unversionedId:"api/fastkafka/encoder/avro_encoder",id:"version-0.7.0/api/fastkafka/encoder/avro_encoder",title:"avro_encoder",description:"fastkafka.encoder.avroencoder 
{fastkafka.encoder.avroencoder}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/avro_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_encoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/avro_encoder",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_decoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/avro_decoder"},next:{title:"avsc_to_pydantic",permalink:"/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic"}},d={},l=[{value:"<code>fastkafka.encoder.avro_encoder</code>",id:"fastkafka.encoder.avro_encoder",level:2},{value:"<code>avro_encoder</code>",id:"avro_encoder",level:3}],s={toc:l},p="wrapper";function f(e){let{components:r,...t}=e;return(0,a.kt)(p,(0,n.Z)({},s,t,{components:r,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.encoder.avro_encoder"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avro_encoder")),(0,a.kt)("h3",{id:"avro_encoder"},(0,a.kt)("inlineCode",{parentName:"h3"},"avro_encoder")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def avro_encoder(msg: pydantic.main.BaseModel) -> bytes")),(0,a.kt)("p",null,"Encoder to encode pydantic instances to avro message"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"msg"),": An instance of pydantic basemodel")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"A bytes message which is encoded from pydantic basemodel")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/ba9d536d.e1fae3ca.js b/assets/js/ba9d536d.e1fae3ca.js new file mode 100644 index 0000000..02d169f --- /dev/null +++ b/assets/js/ba9d536d.e1fae3ca.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1384],{3905:(e,t,n)=>{n.d(t,{Zo:()=>l,kt:()=>m});var 
r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?a(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function u(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),s=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},l=function(e){var t=s(e.components);return r.createElement(c.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,l=u(e,["components","mdxType","originalType","parentName"]),p=s(n),d=o,m=p["".concat(c,".").concat(d)]||p[d]||f[d]||a;return n?r.createElement(m,i(i({ref:t},l),{},{components:n})):r.createElement(m,i({ref:t},l))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var u={};for(var c in t)hasOwnProperty.call(t,c)&&(u[c]=t[c]);u.originalType=e,u[p]="string"==typeof e?e:o,i[1]=u;for(var s=2;s<a;s++)i[s]=n[s];return 
r.createElement.apply(null,i)}return r.createElement.apply(null,n)}d.displayName="MDXCreateElement"},2435:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>u,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={},i="Authentication",u={unversionedId:"guides/Guide_03_Authentication",id:"version-0.8.0/guides/Guide_03_Authentication",title:"Authentication",description:"TLS Authentication",source:"@site/versioned_docs/version-0.8.0/guides/Guide_03_Authentication.md",sourceDirName:"guides",slug:"/guides/Guide_03_Authentication",permalink:"/docs/guides/Guide_03_Authentication",draft:!1,tags:[],version:"0.8.0",frontMatter:{}},c={},s=[{value:"TLS Authentication",id:"tls-authentication",level:2}],l={toc:s},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},l,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"authentication"},"Authentication"),(0,o.kt)("h2",{id:"tls-authentication"},"TLS Authentication"),(0,o.kt)("p",null,"sasl_mechanism (str) \u2013 Authentication mechanism when security_protocol\nis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN"),(0,o.kt)("p",null,"sasl_plain_username (str) \u2013 username for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_plain_password (str) \u2013 password for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_oauth_token_provider (AbstractTokenProvider) \u2013 OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). 
Default: None"))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/be529d37.6621d11d.js b/assets/js/be529d37.6621d11d.js new file mode 100644 index 0000000..49ffc4c --- /dev/null +++ b/assets/js/be529d37.6621d11d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3033],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>k});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function r(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){i(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function l(e,t){if(null==e)return{};var n,a,i=function(e,t){if(null==e)return{};var n,a,i={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var s=a.createContext({}),p=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},d=function(e){var t=p(e.components);return a.createElement(s.Provider,{value:t},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var 
n=e.components,i=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),m=p(n),u=i,k=m["".concat(s,".").concat(u)]||m[u]||c[u]||o;return n?a.createElement(k,r(r({ref:t},d),{},{components:n})):a.createElement(k,r({ref:t},d))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,r=new Array(o);r[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[m]="string"==typeof e?e:i,r[1]=l;for(var p=2;p<o;p++)r[p]=n[p];return a.createElement.apply(null,r)}return a.createElement.apply(null,n)}u.displayName="MDXCreateElement"},6524:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var a=n(7462),i=(n(7294),n(3905));const o={},r=void 0,l={unversionedId:"api/fastkafka/testing/Tester",id:"version-0.7.1/api/fastkafka/testing/Tester",title:"Tester",description:"fastkafka.testing.Tester {fastkafka.testing.Tester}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/testing/Tester.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/Tester",permalink:"/docs/0.7.1/api/fastkafka/testing/Tester",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LocalRedpandaBroker",permalink:"/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker"},next:{title:"fastkafka",permalink:"/docs/0.7.1/cli/fastkafka"}},s={},p=[{value:"<code>fastkafka.testing.Tester</code>",id:"fastkafka.testing.Tester",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>benchmark</code>",id:"benchmark",level:3},{value:"<code>consumes</code>",id:"consumes",level:3},{value:"<code>create_docs</code>",id:"create_docs",level:3},{value:"<code>create_mocks</code>",id:"create_mocks",level:3},{value:"<code>fastapi_lifespan</code>",id:"fastapi_lifespan",level:3},{value:"<code>get_topics</code>",id:"get_topics",level:3},{value:"<code>produces</code>",id:"produces",level:3},{value:"
<code>run_in_background</code>",id:"run_in_background",level:3},{value:"<code>set_kafka_broker</code>",id:"set_kafka_broker",level:3},{value:"<code>using_local_kafka</code>",id:"using_local_kafka",level:3},{value:"<code>using_local_redpanda</code>",id:"using_local_redpanda",level:3}],d={toc:p},m="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(m,(0,a.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.Tester")),(0,i.kt)("h3",{id:"init"},(0,i.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None")),(0,i.kt)("p",null,"Mirror-like object for testing a FastFafka application"),(0,i.kt)("p",null,"Can be used as context manager"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"app"),": The FastKafka application to be tested."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"broker"),": An optional broker to start and to use for testing."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can 
connect")),(0,i.kt)("h3",{id:"benchmark"},(0,i.kt)("inlineCode",{parentName:"h3"},"benchmark")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]")),(0,i.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"interval"),": Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sliding_window_size"),": The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated")),(0,i.kt)("h3",{id:"consumes"},(0,i.kt)("inlineCode",{parentName:"h3"},"consumes")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.1', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], 
typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"decoder"),": Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. 
It also accepts custom decoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"executor"),': Type of executor to choose for consuming tasks. Avaliable options\nare "SequentialExecutor" and "DynamicTaskExecutor". The default option is\n"SequentialExecutor" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n"DynamicTaskExecutor" which will wrap the consuming functions into tasks\nand run them in on asyncio loop in background. This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: "on_". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"brokers"),": Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": Optional description of the consuming function async docs.\nIf not provided, consuming function ",(0,i.kt)("strong",{parentName:"li"},"doc")," attr will be used."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. 
If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string (or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings) that the consumer should contact to bootstrap\ninitial cluster metadata.")),(0,i.kt)("p",null,"This does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ",(0,i.kt)("inlineCode",{parentName:"p"},"localhost:9092"),"."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~.consumer.group_coordinator.GroupCoordinator"),"\nfor logging with respect to consumer group administration. Default:\n",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-{version}")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. 
The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Client request timeout in milliseconds.\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. 
Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. 
It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more information see\n:ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),". Default: None."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying ",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. 
Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Valid values are:\n",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("h3",{id:"create_docs"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_docs")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_docs(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Create the asyncapi documentation based on the configured consumers and producers."),(0,i.kt)("p",null,"This function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers."),(0,i.kt)("p",null,"Note:\nThe asyncapi documentation is saved to the location specified by the ",(0,i.kt)("inlineCode",{parentName:"p"},"_asyncapi_path"),"\nattribute of the FastKafka instance."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"None")),(0,i.kt)("h3",{id:"create_mocks"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_mocks")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_mocks(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,i.kt)("h3",{id:"fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"h3"},"fastapi_lifespan")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]")),(0,i.kt)("p",null,"Method for managing the lifespan of a FastAPI application with a specific Kafka 
broker."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_broker_name"),": The name of the Kafka broker to start FastKafka")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"Lifespan function to use for initializing FastAPI")),(0,i.kt)("h3",{id:"get_topics"},(0,i.kt)("inlineCode",{parentName:"h3"},"get_topics")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]")),(0,i.kt)("p",null,"Get all topics for both producing and consuming."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A set of topics for both producing and consuming.")),(0,i.kt)("h3",{id:"produces"},(0,i.kt)("inlineCode",{parentName:"h3"},"produces")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fcedfc68f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fcedec6c850>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, 
sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when delivery report for a produced message is received"),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encoder"),": Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. 
It also accepts custom encoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: "to_". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"brokers"),": Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": Optional description of the producing function async docs.\nIf not provided, producing function ",(0,i.kt)("strong",{parentName:"li"},"doc")," attr will be used."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. 
No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). 
Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. 
(See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"))),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": when needed")),(0,i.kt)("h3",{id:"run_in_background"},(0,i.kt)("inlineCode",{parentName:"h3"},"run_in_background")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]")),(0,i.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,i.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,i.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is triggered."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A decorator function that takes a background task as an input and stores it to be run in the backround.")),(0,i.kt)("h3",{id:"set_kafka_broker"},(0,i.kt)("inlineCode",{parentName:"h3"},"set_kafka_broker")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def set_kafka_broker(self, kafka_broker_name: str) -> None")),(0,i.kt)("p",null,"Sets the Kafka broker to start FastKafka with"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_broker_name"),": The name of the Kafka broker to start 
FastKafka")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": If the provided kafka_broker_name is not found in dictionary of kafka_brokers")),(0,i.kt)("h3",{id:"using_local_kafka"},(0,i.kt)("inlineCode",{parentName:"h3"},"using_local_kafka")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester")),(0,i.kt)("p",null,"Starts local Kafka broker used by the Tester instance"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"An instance of tester with Kafka as broker")),(0,i.kt)("h3",{id:"using_local_redpanda"},(0,i.kt)("inlineCode",{parentName:"h3"},"using_local_redpanda")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester")),(0,i.kt)("p",null,"Starts local 
Redpanda broker used by the Tester instance"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"tag"),": Tag of Redpanda image to use to start container"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"seastar_core"),": Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"memory"),": The amount of memory to make available to Redpanda"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"mode"),": Mode to use to load configuration properties in container"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"default_log_level"),": Log levels to use for Redpanda")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"An instance of tester with Redpanda as broker")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/beaba6c2.94675e66.js b/assets/js/beaba6c2.94675e66.js new file mode 100644 index 0000000..f643423 --- /dev/null +++ b/assets/js/beaba6c2.94675e66.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3023],{3905:(e,t,a)=>{a.d(t,{Zo:()=>p,kt:()=>k});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),u=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},h="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),h=u(a),c=o,k=h["".concat(s,".").concat(c)]||h[c]||d[c]||i;return a?n.createElement(k,r(r({ref:t},p),{},{components:a})):n.createElement(k,r({ref:t},p))}));function k(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=a.length,r=new Array(i);r[0]=c;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[h]="string"==typeof e?e:o,r[1]=l;for(var u=2;u<i;u++)r[u]=a[u];return n.createElement.apply(null,r)}return n.createElement.apply(null,a)}c.displayName="MDXCreateElement"},1566:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var n=a(7462),o=(a(7294),a(3905));const i={},r="Contributing to fastkafka",l={unversionedId:"CONTRIBUTING",id:"version-0.6.0/CONTRIBUTING",title:"Contributing to fastkafka",description:"First off, thanks for 
taking the time to contribute! \u2764\ufe0f",source:"@site/versioned_docs/version-0.6.0/CONTRIBUTING.md",sourceDirName:".",slug:"/CONTRIBUTING",permalink:"/docs/0.6.0/CONTRIBUTING",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LICENSE",permalink:"/docs/0.6.0/LICENSE"},next:{title:"Release notes",permalink:"/docs/0.6.0/CHANGELOG"}},s={},u=[{value:"Table of Contents",id:"table-of-contents",level:2},{value:"I Have a Question",id:"i-have-a-question",level:2},{value:"I Want To Contribute",id:"i-want-to-contribute",level:2},{value:"Reporting Bugs",id:"reporting-bugs",level:3},{value:"Before Submitting a Bug Report",id:"before-submitting-a-bug-report",level:4},{value:"How Do I Submit a Good Bug Report?",id:"how-do-i-submit-a-good-bug-report",level:4},{value:"Suggesting Enhancements",id:"suggesting-enhancements",level:3},{value:"Before Submitting an Enhancement",id:"before-submitting-an-enhancement",level:4},{value:"How Do I Submit a Good Enhancement Suggestion?",id:"how-do-i-submit-a-good-enhancement-suggestion",level:4},{value:"Development",id:"development",level:2},{value:"Prepare the dev environment",id:"prepare-the-dev-environment",level:3},{value:"Clone the fastkafka repository",id:"clone-the-fastkafka-repository",level:4},{value:"Optional: create a virtual python environment",id:"optional-create-a-virtual-python-environment",level:4},{value:"Install fastkafka",id:"install-fastkafka",level:4},{value:"Install JRE and Kafka toolkit",id:"install-jre-and-kafka-toolkit",level:4},{value:"Install npm",id:"install-npm",level:4},{value:"Install docusaurus",id:"install-docusaurus",level:4},{value:"Check if everything works",id:"check-if-everything-works",level:4},{value:"Way of working",id:"way-of-working",level:3},{value:"Before a PR",id:"before-a-pr",level:3},{value:"Attribution",id:"attribution",level:2}],p={toc:u},h="wrapper";function 
d(e){let{components:t,...a}=e;return(0,o.kt)(h,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"contributing-to-fastkafka"},"Contributing to fastkafka"),(0,o.kt)("p",null,"First off, thanks for taking the time to contribute! \u2764\ufe0f"),(0,o.kt)("p",null,"All types of contributions are encouraged and valued. See the ",(0,o.kt)("a",{parentName:"p",href:"#table-of-contents"},"Table of Contents")," for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. \ud83c\udf89"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:"),(0,o.kt)("ul",{parentName:"blockquote"},(0,o.kt)("li",{parentName:"ul"},"Star the project"),(0,o.kt)("li",{parentName:"ul"},"Tweet about it"),(0,o.kt)("li",{parentName:"ul"},"Refer this project in your project's readme"),(0,o.kt)("li",{parentName:"ul"},"Mention the project at local meetups and tell your friends/colleagues"))),(0,o.kt)("h2",{id:"table-of-contents"},"Table of Contents"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"I Have a Question")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#reporting-bugs"},"Reporting Bugs")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#suggesting-enhancements"},"Suggesting 
Enhancements")))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#development"},"Development"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#prepare-the-dev-environment"},"Prepare the dev environment")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#way-of-working"},"Way of working")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#before-a-pr"},"Before a PR"))))),(0,o.kt)("h2",{id:"i-have-a-question"},"I Have a Question"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"If you want to ask a question, we assume that you have read the available ",(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/docs"},"Documentation"),".")),(0,o.kt)("p",null,"Before you ask a question, it is best to search for existing ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"Issues")," that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue."),(0,o.kt)("p",null,"If you then still feel the need to ask a question and need clarification, we recommend the following:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord")),(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Provide as much context as you can about what you're running into")))),(0,o.kt)("p",null,"We will then take care of the issue as soon as possible."),(0,o.kt)("h2",{id:"i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("blockquote",null,(0,o.kt)("h3",{parentName:"blockquote",id:"legal-notice"},"Legal Notice"),(0,o.kt)("p",{parentName:"blockquote"},"When contributing to this project, you must agree that you have authored 
100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.")),(0,o.kt)("h3",{id:"reporting-bugs"},"Reporting Bugs"),(0,o.kt)("h4",{id:"before-submitting-a-bug-report"},"Before Submitting a Bug Report"),(0,o.kt)("p",null,"A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation"),". 
If you are looking for support, you might want to check ",(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"this section"),")."),(0,o.kt)("li",{parentName:"ul"},"To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues?q=label%3Abug"},"bug tracker"),"."),(0,o.kt)("li",{parentName:"ul"},"Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue."),(0,o.kt)("li",{parentName:"ul"},"Collect information about the bug:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Stack trace (Traceback)"),(0,o.kt)("li",{parentName:"ul"},"OS, Platform and Version (Windows, Linux, macOS, x86, ARM)"),(0,o.kt)("li",{parentName:"ul"},"Python version"),(0,o.kt)("li",{parentName:"ul"},"Possibly your input and the output"),(0,o.kt)("li",{parentName:"ul"},"Can you reliably reproduce the issue? And can you also reproduce it with older versions?")))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-bug-report"},"How Do I Submit a Good Bug Report?"),(0,o.kt)("p",null,"We use GitHub issues to track bugs and errors. If you run into an issue with the project:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),". (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)"),(0,o.kt)("li",{parentName:"ul"},"Explain the behavior you would expect and the actual behavior."),(0,o.kt)("li",{parentName:"ul"},"Please provide as much context as possible and describe the ",(0,o.kt)("em",{parentName:"li"},"reproduction steps")," that someone else can follow to recreate the issue on their own. This usually includes your code. 
For good bug reports you should isolate the problem and create a reduced test case."),(0,o.kt)("li",{parentName:"ul"},"Provide the information you collected in the previous section.")),(0,o.kt)("p",null,"Once it's filed:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"The project team will label the issue accordingly."),(0,o.kt)("li",{parentName:"ul"},"A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro"),". Bugs with the ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro")," tag will not be addressed until they are reproduced."),(0,o.kt)("li",{parentName:"ul"},"If the team is able to reproduce the issue, it will be marked ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-fix"),", as well as possibly other tags (such as ",(0,o.kt)("inlineCode",{parentName:"li"},"critical"),"), and the issue will be left to be implemented.")),(0,o.kt)("h3",{id:"suggesting-enhancements"},"Suggesting Enhancements"),(0,o.kt)("p",null,"This section guides you through submitting an enhancement suggestion for fastkafka, ",(0,o.kt)("strong",{parentName:"p"},"including completely new features and minor improvements to existing functionality"),". 
Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions."),(0,o.kt)("h4",{id:"before-submitting-an-enhancement"},"Before Submitting an Enhancement"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation")," carefully and find out if the functionality is already covered, maybe by an individual configuration."),(0,o.kt)("li",{parentName:"ul"},"Perform a ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues"},"search")," to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one."),(0,o.kt)("li",{parentName:"ul"},"Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library."),(0,o.kt)("li",{parentName:"ul"},"If you are not sure or would like to discuiss the enhancement with us directly, you can always contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord"))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-enhancement-suggestion"},"How Do I Submit a Good Enhancement Suggestion?"),(0,o.kt)("p",null,"Enhancement suggestions are tracked as ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"GitHub issues"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Use a ",(0,o.kt)("strong",{parentName:"li"},"clear and descriptive title")," for the issue to identify the suggestion."),(0,o.kt)("li",{parentName:"ul"},"Provide a ",(0,o.kt)("strong",{parentName:"li"},"step-by-step description of the suggested enhancement")," in as many details as possible."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Describe the current behavior")," and ",(0,o.kt)("strong",{parentName:"li"},"explain which behavior you expected to see instead")," and why. At this point you can also tell which alternatives do not work for you."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Explain why this enhancement would be useful")," to most fastkafka users. 
You may also want to point out the other projects that solved it better and which could serve as inspiration.")),(0,o.kt)("h2",{id:"development"},"Development"),(0,o.kt)("h3",{id:"prepare-the-dev-environment"},"Prepare the dev environment"),(0,o.kt)("p",null,"To start contributing to fastkafka, you first have to prepare the development environment."),(0,o.kt)("h4",{id:"clone-the-fastkafka-repository"},"Clone the fastkafka repository"),(0,o.kt)("p",null,"To clone the repository, run the following command in the CLI:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"git clone https://github.com/airtai/fastkafka.git\n")),(0,o.kt)("h4",{id:"optional-create-a-virtual-python-environment"},"Optional: create a virtual python environment"),(0,o.kt)("p",null,"To prevent library version clashes with you other projects, it is reccomended that you create a virtual python environment for your fastkafka project by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"python3 -m venv fastkafka-env\n")),(0,o.kt)("p",null,"And to activate your virtual environment run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"source fastkafka-env/bin/activate\n")),(0,o.kt)("p",null,"To learn more about virtual environments, please have a look at ",(0,o.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available."},"official python documentation")),(0,o.kt)("h4",{id:"install-fastkafka"},"Install fastkafka"),(0,o.kt)("p",null,"To install fastkafka, navigate to the root directory of the cloned fastkafka project and run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'pip install fastkafka -e [."dev"]\n')),(0,o.kt)("h4",{id:"install-jre-and-kafka-toolkit"},"Install JRE and Kafka toolkit"),(0,o.kt)("p",null,"To be able to run tests and use all 
the functionalities of fastkafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka testing install-deps")," CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install JRE and Kafka manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.oracle.com/javase/9/install/toc.htm"},"JDK and JRE installation guide")," and ",(0,o.kt)("a",{parentName:"li",href:"https://kafka.apache.org/quickstart"},"Apache Kafka quickstart"))),(0,o.kt)("h4",{id:"install-npm"},"Install npm"),(0,o.kt)("p",null,"To be able to run tests you must have npm installed, because of documentation generation. To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka docs install_deps")," CLI command which will install npm for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install npm manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.npmjs.com/downloading-and-installing-node-js-and-npm"},"NPM installation guide"))),(0,o.kt)("h4",{id:"install-docusaurus"},"Install docusaurus"),(0,o.kt)("p",null,"To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of fastkafka project."),(0,o.kt)("h4",{id:"check-if-everything-works"},"Check if everything works"),(0,o.kt)("p",null,"After installing fastkafka and all the necessary dependencies, run ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev_test"),' in the root of fastkafka project. This will take a couple of minutes as it will run all the tests on fastkafka project. If everythng is setup correctly, you will get a "Success." 
message in your terminal, otherwise please refer to previous steps.'),(0,o.kt)("h3",{id:"way-of-working"},"Way of working"),(0,o.kt)("p",null,"The development of fastkafka is done in Jupyter notebooks. Inside the ",(0,o.kt)("inlineCode",{parentName:"p"},"nbs")," directory you will find all the source code of fastkafka, this is where you will implement your changes."),(0,o.kt)("p",null,"The testing, cleanup and exporting of the code is being handled by ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev"),", please, before starting the work on fastkafka, get familiar with it by reading ",(0,o.kt)("a",{parentName:"p",href:"https://nbdev.fast.ai/getting_started.html"},"nbdev documentation"),"."),(0,o.kt)("p",null,"The general philosopy you should follow when writing code for fastkafka is:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Function should be an atomic functionality, short and concise",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Good rule of thumb: your function should be 5-10 lines long usually"))),(0,o.kt)("li",{parentName:"ul"},"If there are more than 2 params, enforce keywording using *",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"E.g.: ",(0,o.kt)("inlineCode",{parentName:"li"},"def function(param1, *, param2, param3): ...")))),(0,o.kt)("li",{parentName:"ul"},"Define typing of arguments and return value",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected"))),(0,o.kt)("li",{parentName:"ul"},"After the function cell, write test cells using the assert keyword",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Whenever you implement something you should test tat functionality immediateli in the cells below "))),(0,o.kt)("li",{parentName:"ul"},"Add Google style python docstrings when function is implemented and tested")),(0,o.kt)("h3",{id:"before-a-pr"},"Before a PR"),(0,o.kt)("p",null,"After you have 
implemented your changes you will want to open a pull request to merge those changes into our main branch. To make this as painless for you and us, please do the following before opening the request (all the commands are to be run in the root of fastkafka project):"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Format your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbqa black nbs")),(0,o.kt)("li",{parentName:"ol"},"Close, shutdown, and clean the metadata from your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_clean")),(0,o.kt)("li",{parentName:"ol"},"Export your code: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_export")),(0,o.kt)("li",{parentName:"ol"},"Run the tests: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_test")),(0,o.kt)("li",{parentName:"ol"},"Test code typing: ",(0,o.kt)("inlineCode",{parentName:"li"},"mypy fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with bandit: ",(0,o.kt)("inlineCode",{parentName:"li"},"bandit -r fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with semgrep: ",(0,o.kt)("inlineCode",{parentName:"li"},"semgrep --config auto -r fastkafka"))),(0,o.kt)("p",null,"When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everythng is in order, we will approve your merge."),(0,o.kt)("h2",{id:"attribution"},"Attribution"),(0,o.kt)("p",null,"This guide is based on the ",(0,o.kt)("strong",{parentName:"p"},"contributing-gen"),". 
",(0,o.kt)("a",{parentName:"p",href:"https://github.com/bttger/contributing-gen"},"Make your own"),"!"))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/bfac6a8d.33d1069c.js b/assets/js/bfac6a8d.33d1069c.js new file mode 100644 index 0000000..eaee379 --- /dev/null +++ b/assets/js/bfac6a8d.33d1069c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6064],{3905:(e,a,n)=>{n.d(a,{Zo:()=>k,kt:()=>m});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a<arguments.length;a++){var n=null!=arguments[a]?arguments[a]:{};a%2?r(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function l(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var n,t,o={},r=Object.keys(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},k=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},f=t.forwardRef((function(e,a){var 
n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(n),f=o,m=c["".concat(i,".").concat(f)]||c[f]||u[f]||r;return n?t.createElement(m,s(s({ref:a},k),{},{components:n})):t.createElement(m,s({ref:a},k))}));function m(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var r=n.length,s=new Array(r);s[0]=f;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=n[p];return t.createElement.apply(null,s)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},7227:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var t=n(7462),o=(n(7294),n(3905));const r={},s="First Steps",l={unversionedId:"guides/Guide_02_First_Steps",id:"version-0.5.0/guides/Guide_02_First_Steps",title:"First Steps",description:"Creating a simple Kafka consumer app",source:"@site/versioned_docs/version-0.5.0/guides/Guide_02_First_Steps.md",sourceDirName:"guides",slug:"/guides/Guide_02_First_Steps",permalink:"/docs/0.5.0/guides/Guide_02_First_Steps",draft:!1,tags:[],version:"0.5.0",frontMatter:{}},i={},p=[{value:"Creating a simple Kafka consumer app",id:"creating-a-simple-kafka-consumer-app",level:2},{value:"Sending first message to your consumer",id:"sending-first-message-to-your-consumer",level:2},{value:"Creating a hello Kafka producer",id:"creating-a-hello-kafka-producer",level:2},{value:"Recap",id:"recap",level:2}],k={toc:p},c="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(c,(0,t.Z)({},k,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"first-steps"},"First Steps"),(0,o.kt)("h2",{id:"creating-a-simple-kafka-consumer-app"},"Creating a simple Kafka consumer app"),(0,o.kt)("p",null,"For our first demo we will create the simplest possible Kafka consumer\nand run it using \u2018fastkafka run\u2019 command."),(0,o.kt)("p",null,"The consumer 
will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Connect to the Kafka Broker we setup in the Intro guide")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Listen to the hello topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Write any message received from the hello topic to stdout"))),(0,o.kt)("p",null,"To create the consumer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_consumer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n print(f"Got data, msg={msg.msg}", flush=True)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,'!!! warning "Remember to flush"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. 
To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n")),(0,o.kt)("p",null,"To run this consumer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
subscribed.\n[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. \nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n")),(0,o.kt)("p",null,"Now you can interact with your consumer, by sending the messages to the\nsubscribed \u2018hello\u2019 topic, don\u2019t worry, we will cover this in the next\nstep of this guide."),(0,o.kt)("h2",{id:"sending-first-message-to-your-consumer"},"Sending first message to your consumer"),(0,o.kt)("p",null,"After we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly."),(0,o.kt)("p",null,"If you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic."),(0,o.kt)("p",null,"First, connect to your running kafka broker by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"docker run -it kafka /bin/bash\n")),(0,o.kt)("p",null,"Then, when connected to the container, 
run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n")),(0,o.kt)("p",null,"This will open an interactive connection to the hello topic, now you can\nwrite your mesages to the topic and they will be consumed by our\nconsumer."),(0,o.kt)("p",null,"In the shell, type:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'{"msg":"hello"}\n')),(0,o.kt)("p",null,"and press enter. This will send a hello message to the topic which will\nbe read by our running consumer and outputed to stdout."),(0,o.kt)("p",null,"Check the output of your consumer (terminal where you ran the \u2018fastkafka\nrun\u2019 command) and confirm that your consumer has read the Kafka message.\nYou shoud see something like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"Got data, msg=hello\n")),(0,o.kt)("h2",{id:"creating-a-hello-kafka-producer"},"Creating a hello Kafka producer"),(0,o.kt)("p",null,"Consuming messages is only a part of this Library functionality, the\nother big part is producing the messages. 
So, let\u2019s create our first\nkafka producer which will send it\u2019s greetings to our consumer\nperiodically."),(0,o.kt)("p",null,"The producer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Connect to the Kafka Broker we setup in the Intro guide"),(0,o.kt)("li",{parentName:"ol"},"Connect to the hello topic"),(0,o.kt)("li",{parentName:"ol"},"Periodically send a message to the hello world topic")),(0,o.kt)("p",null,"To create the producer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_producer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n logger.info(f"Producing: {msg}")\n return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello(HelloKafkaMsg(msg="hello"))\n await asyncio.sleep(1)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. 
in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,"To run this producer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: 
Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n")),(0,o.kt)("p",null,"Now, while the producer is running, it will send a HelloKafkaMsg every\nsecond to the hello kafka topic. 
If your consumer is still running, you\nshould see the messages appear in its log."),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka consumer using FastKafka"),(0,o.kt)("li",{parentName:"ol"},"Sent a message to our consumer trough Kafka"),(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka producer using FastKafka")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c0e3ff8b.4348bcbf.js b/assets/js/c0e3ff8b.4348bcbf.js new file mode 100644 index 0000000..fc12d64 --- /dev/null +++ b/assets/js/c0e3ff8b.4348bcbf.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2368],{3905:(t,e,a)=>{a.d(e,{Zo:()=>m,kt:()=>N});var n=a(7294);function l(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function r(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function d(t){for(var e=1;e<arguments.length;e++){var a=null!=arguments[e]?arguments[e]:{};e%2?r(Object(a),!0).forEach((function(e){l(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function i(t,e){if(null==t)return{};var a,n,l=function(t,e){if(null==t)return{};var a,n,l={},r=Object.keys(t);for(n=0;n<r.length;n++)a=r[n],e.indexOf(a)>=0||(l[a]=t[a]);return l}(t,e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);for(n=0;n<r.length;n++)a=r[n],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(l[a]=t[a])}return l}var p=n.createContext({}),o=function(t){var e=n.useContext(p),a=e;return t&&(a="function"==typeof 
t?t(e):d(d({},e),t)),a},m=function(t){var e=o(t.components);return n.createElement(p.Provider,{value:e},t.children)},k="mdxType",s={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},u=n.forwardRef((function(t,e){var a=t.components,l=t.mdxType,r=t.originalType,p=t.parentName,m=i(t,["components","mdxType","originalType","parentName"]),k=o(a),u=l,N=k["".concat(p,".").concat(u)]||k[u]||s[u]||r;return a?n.createElement(N,d(d({ref:e},m),{},{components:a})):n.createElement(N,d({ref:e},m))}));function N(t,e){var a=arguments,l=e&&e.mdxType;if("string"==typeof t||l){var r=a.length,d=new Array(r);d[0]=u;var i={};for(var p in e)hasOwnProperty.call(e,p)&&(i[p]=e[p]);i.originalType=t,i[k]="string"==typeof t?t:l,d[1]=i;for(var o=2;o<r;o++)d[o]=a[o];return n.createElement.apply(null,d)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},567:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>p,contentTitle:()=>d,default:()=>s,frontMatter:()=>r,metadata:()=>i,toc:()=>o});var n=a(7462),l=(a(7294),a(3905));const r={},d=void 0,i={unversionedId:"api/fastkafka/encoder/AvroBase",id:"version-0.8.0/api/fastkafka/encoder/AvroBase",title:"AvroBase",description:"fastkafka.encoder.AvroBase 
{fastkafka.encoder.AvroBase}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/AvroBase.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/AvroBase",permalink:"/docs/api/fastkafka/encoder/AvroBase",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"KafkaEvent",permalink:"/docs/api/fastkafka/KafkaEvent"},next:{title:"avro_decoder",permalink:"/docs/api/fastkafka/encoder/avro_decoder"}},p={},o=[{value:"fastkafka.encoder.AvroBase",id:"fastkafka.encoder.AvroBase",level:2},{value:"<strong>init</strong>",id:"pydantic.main.BaseModel.init",level:3},{value:"avro_schema",id:"fastkafka._components.encoder.avro.AvroBase.avro_schema",level:3},{value:"avro_schema_for_pydantic_class",id:"fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_class",level:3},{value:"avro_schema_for_pydantic_object",id:"fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_object",level:3},{value:"copy",id:"pydantic.main.BaseModel.copy",level:3},{value:"model_computed_fields",id:"pydantic.main.BaseModel.model_computed_fields",level:3},{value:"model_construct",id:"pydantic.main.BaseModel.model_construct",level:3},{value:"model_copy",id:"pydantic.main.BaseModel.model_copy",level:3},{value:"model_dump",id:"pydantic.main.BaseModel.model_dump",level:3},{value:"model_dump_json",id:"pydantic.main.BaseModel.model_dump_json",level:3},{value:"model_extra",id:"pydantic.main.BaseModel.model_extra",level:3},{value:"model_fields_set",id:"pydantic.main.BaseModel.model_fields_set",level:3},{value:"model_json_schema",id:"pydantic.main.BaseModel.model_json_schema",level:3},{value:"model_parametrized_name",id:"pydantic.main.BaseModel.model_parametrized_name",level:3},{value:"model_post_init",id:"pydantic.main.BaseModel.model_post_init",level:3},{value:"model_rebuild",id:"pydantic.main.BaseModel.model_rebuild",level:3},{value:"model_validate",id:"pydantic.main.BaseModel.model_validate",level:3},{value:"model_validate
_json",id:"pydantic.main.BaseModel.model_validate_json",level:3}],m={toc:o},k="wrapper";function s(t){let{components:e,...a}=t;return(0,l.kt)(k,(0,n.Z)({},m,a,{components:e,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"fastkafka.encoder.AvroBase"},"fastkafka.encoder.AvroBase"),(0,l.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L22-L235",class:"link-to-source",target:"_blank"},"View source"),(0,l.kt)("p",null,"This is base pydantic class that will add some methods"),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.init"},(0,l.kt)("strong",{parentName:"h3"},"init")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n __pydantic_self__, data\n)\n")),(0,l.kt)("p",null,"Create a new model by parsing and validating input data from keyword arguments."),(0,l.kt)("p",null,"Raises ValidationError if the input data cannot be parsed to form a valid model."),(0,l.kt)("p",null,"Uses ",(0,l.kt)("inlineCode",{parentName:"p"},"__pydantic_self__")," instead of the more common ",(0,l.kt)("inlineCode",{parentName:"p"},"self")," for the first arg to\nallow ",(0,l.kt)("inlineCode",{parentName:"p"},"self")," as a field name."),(0,l.kt)("h3",{id:"fastkafka._components.encoder.avro.AvroBase.avro_schema"},"avro_schema"),(0,l.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L80-L99",class:"link-to-source",target:"_blank"},"View source"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\navro_schema(\n by_alias=True, namespace=None\n)\n")),(0,l.kt)("p",null,"Returns the Avro schema for the Pydantic 
class."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"bool")),(0,l.kt)("td",{parentName:"tr",align:null},"Generate schemas using aliases defined. Defaults to True."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"namespace")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,l.kt)("td",{parentName:"tr",align:null},"Optional namespace string for schema generation."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"The Avro schema for the 
model.")))),(0,l.kt)("h3",{id:"fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_class"},"avro_schema_for_pydantic_class"),(0,l.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L53-L77",class:"link-to-source",target:"_blank"},"View source"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\navro_schema_for_pydantic_class(\n pydantic_model, by_alias=True, namespace=None\n)\n")),(0,l.kt)("p",null,"Returns the Avro schema for the given Pydantic class."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"pydantic_model")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Type[pydantic.main.BaseModel]")),(0,l.kt)("td",{parentName:"tr",align:null},"The Pydantic class."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"bool")),(0,l.kt)("td",{parentName:"tr",align:null},"Generate schemas using aliases defined. 
Defaults to True."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"namespace")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,l.kt)("td",{parentName:"tr",align:null},"Optional namespace string for schema generation."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"The Avro schema for the model.")))),(0,l.kt)("h3",{id:"fastkafka._components.encoder.avro.AvroBase.avro_schema_for_pydantic_object"},"avro_schema_for_pydantic_object"),(0,l.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/encoder/avro.py#L26-L50",class:"link-to-source",target:"_blank"},"View source"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\navro_schema_for_pydantic_object(\n pydantic_model, by_alias=True, namespace=None\n)\n")),(0,l.kt)("p",null,"Returns the Avro schema for the given Pydantic 
object."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"pydantic_model")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"BaseModel")),(0,l.kt)("td",{parentName:"tr",align:null},"The Pydantic object."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"bool")),(0,l.kt)("td",{parentName:"tr",align:null},"Generate schemas using aliases defined. 
Defaults to True."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"namespace")),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,l.kt)("td",{parentName:"tr",align:null},"Optional namespace string for schema generation."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"The Avro schema for the model.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.copy"},"copy"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"copy(\n self, include=None, exclude=None, update=None, deep=False\n)\n")),(0,l.kt)("p",null,"Returns a copy of the model."),(0,l.kt)("p",null,"This method is now deprecated; use ",(0,l.kt)("inlineCode",{parentName:"p"},"model_copy")," instead. 
If you need ",(0,l.kt)("inlineCode",{parentName:"p"},"include")," or ",(0,l.kt)("inlineCode",{parentName:"p"},"exclude"),", use:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"include")),(0,l.kt)("td",{parentName:"tr",align:null},"AbstractSetIntStr"),(0,l.kt)("td",{parentName:"tr",align:null},"MappingIntStrAny"),(0,l.kt)("td",{parentName:"tr",align:null},"None")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude")),(0,l.kt)("td",{parentName:"tr",align:null},"AbstractSetIntStr"),(0,l.kt)("td",{parentName:"tr",align:null},"MappingIntStrAny"),(0,l.kt)("td",{parentName:"tr",align:null},"None")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"update")),(0,l.kt)("td",{parentName:"tr",align:null},"`Dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None`"),(0,l.kt)("td",{parentName:"tr",align:null},"Optional dictionary of field-value pairs to override field valuesin the copied 
model.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"deep")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"If True, the values of fields that are Pydantic models will be deep copied."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"A copy of the model with included, excluded and updated fields as specified.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_computed_fields"},"model_computed_fields"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@property\nmodel_computed_fields(\n self\n)\n")),(0,l.kt)("p",null,"Get the computed fields of this model instance."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"dict[str, ComputedFieldInfo]")),(0,l.kt)("td",{parentName:"tr",align:null},"A dictionary of computed field names and their corresponding ",(0,l.kt)("inlineCode",{parentName:"td"},"ComputedFieldInfo")," 
objects.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_construct"},"model_construct"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_construct(\n _fields_set=None, values\n)\n")),(0,l.kt)("p",null,"Creates a new instance of the ",(0,l.kt)("inlineCode",{parentName:"p"},"Model")," class with validated data."),(0,l.kt)("p",null,"Creates a new model setting ",(0,l.kt)("inlineCode",{parentName:"p"},"__dict__")," and ",(0,l.kt)("inlineCode",{parentName:"p"},"__pydantic_fields_set__")," from trusted or pre-validated data.\nDefault values are respected, but no other validation is performed.\nBehaves as if ",(0,l.kt)("inlineCode",{parentName:"p"},"Config.extra = 'allow'")," was set since it adds all passed values"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"_fields_set")),(0,l.kt)("td",{parentName:"tr",align:null},"set","[str]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"The set of field names accepted for the Model instance.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"values")),(0,l.kt)("td",{parentName:"tr",align:null},"Any"),(0,l.kt)("td",{parentName:"tr",align:null},"Trusted or pre-validated data 
dictionary."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"A new instance of the ",(0,l.kt)("inlineCode",{parentName:"td"},"Model")," class with validated data.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_copy"},"model_copy"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"model_copy(\n self, update=None, deep=False\n)\n")),(0,l.kt)("p",null,"Returns a copy of the model."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"update")),(0,l.kt)("td",{parentName:"tr",align:null},"dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Values to change/add in the new model. Note: the data is not validatedbefore creating the new model. 
You should trust this data.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"deep")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Set to ",(0,l.kt)("inlineCode",{parentName:"td"},"True")," to make a deep copy of the model."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"New model instance.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_dump"},"model_dump"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"model_dump(\n self,\n mode='python',\n include=None,\n exclude=None,\n by_alias=False,\n exclude_unset=False,\n exclude_defaults=False,\n exclude_none=False,\n round_trip=False,\n warnings=True,\n)\n")),(0,l.kt)("p",null,"Usage docs: ",(0,l.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/dev-v2/usage/serialization/#modelmodel_dump"},"https://docs.pydantic.dev/dev-v2/usage/serialization/#modelmodel_dump")),(0,l.kt)("p",null,"Generate a dictionary representation of the model, optionally specifying which fields to include or 
exclude."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"mode")),(0,l.kt)("td",{parentName:"tr",align:null},"Literal","['json', 'python']"),(0,l.kt)("td",{parentName:"tr",align:null},"str"),(0,l.kt)("td",{parentName:"tr",align:null},"The mode in which ",(0,l.kt)("inlineCode",{parentName:"td"},"to_python")," should run.If mode is 'json', the dictionary will only contain JSON serializable types.If mode is 'python', the dictionary may contain any Python objects.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"include")),(0,l.kt)("td",{parentName:"tr",align:null},"IncEx"),(0,l.kt)("td",{parentName:"tr",align:null},"A list of fields to include in the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude")),(0,l.kt)("td",{parentName:"tr",align:null},"IncEx"),(0,l.kt)("td",{parentName:"tr",align:null},"A list of fields to exclude from the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to use the field's alias in the dictionary key if 
defined."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_unset")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that are unset or None from the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_defaults")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that are set to their default value from the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_none")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that have a value of ",(0,l.kt)("inlineCode",{parentName:"td"},"None")," from the output."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"round_trip")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to enable serialization and deserialization round-trip support."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"warnings")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to log warnings when 
invalid fields are encountered."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"A dictionary representation of the model.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_dump_json"},"model_dump_json"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"model_dump_json(\n self,\n indent=None,\n include=None,\n exclude=None,\n by_alias=False,\n exclude_unset=False,\n exclude_defaults=False,\n exclude_none=False,\n round_trip=False,\n warnings=True,\n)\n")),(0,l.kt)("p",null,"Usage docs: ",(0,l.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/dev-v2/usage/serialization/#modelmodel_dump_json"},"https://docs.pydantic.dev/dev-v2/usage/serialization/#modelmodel_dump_json")),(0,l.kt)("p",null,"Generates a JSON representation of the model using Pydantic's ",(0,l.kt)("inlineCode",{parentName:"p"},"to_json")," 
method."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"indent")),(0,l.kt)("td",{parentName:"tr",align:null},"int"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Indentation to use in the JSON output. If None is passed, the output will be compact.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"include")),(0,l.kt)("td",{parentName:"tr",align:null},"IncEx"),(0,l.kt)("td",{parentName:"tr",align:null},"Field(s) to include in the JSON output. Can take either a string or set of strings."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude")),(0,l.kt)("td",{parentName:"tr",align:null},"IncEx"),(0,l.kt)("td",{parentName:"tr",align:null},"Field(s) to exclude from the JSON output. 
Can take either a string or set of strings."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"None"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to serialize using field aliases."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_unset")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that have not been explicitly set."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_defaults")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that have the default value."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"exclude_none")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to exclude fields that have a value of ",(0,l.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"round_trip")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to use serialization/deserialization between JSON and class 
instance."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"warnings")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to show any warnings that occurred during serialization."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"str")),(0,l.kt)("td",{parentName:"tr",align:null},"A JSON string representation of the model.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_extra"},"model_extra"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@property\nmodel_extra(\n self\n)\n")),(0,l.kt)("p",null,"Get extra fields set during validation."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"`dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None`")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_fields_set"},"model_fields_set"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@property\nmodel_fields_set(\n self\n)\n")),(0,l.kt)("p",null,"Returns the set of fields 
that have been set on this model instance."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"set[str]")),(0,l.kt)("td",{parentName:"tr",align:null},"A set of strings representing the fields that have been set,i.e. that were not filled from defaults.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_json_schema"},"model_json_schema"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_json_schema(\n by_alias=True,\n ref_template='#/$defs/{model}',\n schema_generator=<class 'pydantic.json_schema.GenerateJsonSchema'>,\n mode='validation',\n)\n")),(0,l.kt)("p",null,"Generates a JSON schema for a model class."),(0,l.kt)("p",null,"To override the logic used to generate the JSON schema, you can create a subclass of ",(0,l.kt)("inlineCode",{parentName:"p"},"GenerateJsonSchema"),"\nwith your desired modifications, then override this method on a custom base class and set the default\nvalue of ",(0,l.kt)("inlineCode",{parentName:"p"},"schema_generator")," to be your 
subclass."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"by_alias")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to use attribute aliases or not."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"ref_template")),(0,l.kt)("td",{parentName:"tr",align:null},"str"),(0,l.kt)("td",{parentName:"tr",align:null},"The reference template."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"'#/$defs/{model}'"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"schema_generator")),(0,l.kt)("td",{parentName:"tr",align:null},"type","[GenerateJsonSchema]"),(0,l.kt)("td",{parentName:"tr",align:null},"The JSON schema generator."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"<class 'pydantic.json_schema.GenerateJsonSchema'>"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"mode")),(0,l.kt)("td",{parentName:"tr",align:null},"JsonSchemaMode"),(0,l.kt)("td",{parentName:"tr",align:null},"The mode in which to generate the 
schema."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"'validation'"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"dict[str, Any]")),(0,l.kt)("td",{parentName:"tr",align:null},"The JSON schema for the given model class.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_parametrized_name"},"model_parametrized_name"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_parametrized_name(\n params\n)\n")),(0,l.kt)("p",null,"Compute the class name for parametrizations of generic classes."),(0,l.kt)("p",null,"This method can be overridden to achieve a custom naming scheme for generic BaseModels."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"params")),(0,l.kt)("td",{parentName:"tr",align:null},"tuple[type","[Any]",", ...]"),(0,l.kt)("td",{parentName:"tr",align:null},"Tuple of types of the class. 
Given a generic class",(0,l.kt)("inlineCode",{parentName:"td"},"Model")," with 2 type variables and a concrete model ",(0,l.kt)("inlineCode",{parentName:"td"},"Model[str, int]"),",the value ",(0,l.kt)("inlineCode",{parentName:"td"},"(str, int)")," would be passed to ",(0,l.kt)("inlineCode",{parentName:"td"},"params"),"."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"str")),(0,l.kt)("td",{parentName:"tr",align:null},"String representing the new class where ",(0,l.kt)("inlineCode",{parentName:"td"},"params")," are passed to ",(0,l.kt)("inlineCode",{parentName:"td"},"cls")," as type variables.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"TypeError")),(0,l.kt)("td",{parentName:"tr",align:null},"Raised when trying to generate concrete names for non-generic models.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_post_init"},"model_post_init"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"model_post_init(\n self, _BaseModel__context\n)\n")),(0,l.kt)("p",null,"Override this method to perform additional initialization after ",(0,l.kt)("inlineCode",{parentName:"p"},"__init__")," 
and ",(0,l.kt)("inlineCode",{parentName:"p"},"model_construct"),"."),(0,l.kt)("p",null,"This is useful if you want to do some validation that requires the entire model to be initialized."),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_rebuild"},"model_rebuild"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_rebuild(\n force=False,\n raise_errors=True,\n _parent_namespace_depth=2,\n _types_namespace=None,\n)\n")),(0,l.kt)("p",null,"Try to rebuild the pydantic-core schema for the model."),(0,l.kt)("p",null,"This may be necessary when one of the annotations is a ForwardRef which could not be resolved during\nthe initial attempt to build the schema, and automatic rebuilding fails."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"force")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to force the rebuilding of the model schema, defaults to ",(0,l.kt)("inlineCode",{parentName:"td"},"False"),"."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"False"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"raise_errors")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to raise errors, defaults to 
",(0,l.kt)("inlineCode",{parentName:"td"},"True"),"."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"True"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"_parent_namespace_depth")),(0,l.kt)("td",{parentName:"tr",align:null},"int"),(0,l.kt)("td",{parentName:"tr",align:null},"The depth level of the parent namespace, defaults to 2."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"2"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"_types_namespace")),(0,l.kt)("td",{parentName:"tr",align:null},"dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"The types namespace, defaults to ",(0,l.kt)("inlineCode",{parentName:"td"},"None"),".")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"`bool"),(0,l.kt)("td",{parentName:"tr",align:null},"None`")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_validate"},"model_validate"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_validate(\n obj, strict=None, from_attributes=None, context=None\n)\n")),(0,l.kt)("p",null,"Validate a pydantic model 
instance."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"obj")),(0,l.kt)("td",{parentName:"tr",align:null},"Any"),(0,l.kt)("td",{parentName:"tr",align:null},"The object to validate."),(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("em",{parentName:"td"},"required"))),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"strict")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to raise an exception on invalid fields.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"from_attributes")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to extract data from object attributes.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"context")),(0,l.kt)("td",{parentName:"tr",align:null},"dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Additional context to pass to the 
validator.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"The validated model instance.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"ValidationError")),(0,l.kt)("td",{parentName:"tr",align:null},"If the object could not be validated.")))),(0,l.kt)("h3",{id:"pydantic.main.BaseModel.model_validate_json"},"model_validate_json"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-py"},"@classmethod\nmodel_validate_json(\n json_data, strict=None, context=None\n)\n")),(0,l.kt)("p",null,"Validate the given JSON data against the Pydantic 
model."),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"),(0,l.kt)("th",{parentName:"tr",align:null},"Default"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"json_data")),(0,l.kt)("td",{parentName:"tr",align:null},"str"),(0,l.kt)("td",{parentName:"tr",align:null},"bytes"),(0,l.kt)("td",{parentName:"tr",align:null},"bytearray")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"strict")),(0,l.kt)("td",{parentName:"tr",align:null},"bool"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Whether to enforce types strictly.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"context")),(0,l.kt)("td",{parentName:"tr",align:null},"dict","[str, Any]"),(0,l.kt)("td",{parentName:"tr",align:null},"None"),(0,l.kt)("td",{parentName:"tr",align:null},"Extra variables to pass to the validator.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Returns"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"Model")),(0,l.kt)("td",{parentName:"tr",align:null},"The validated Pydantic 
model.")))),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Type"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,l.kt)("td",{parentName:"tr",align:null},"If ",(0,l.kt)("inlineCode",{parentName:"td"},"json_data")," is not a JSON string.")))))}s.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c16f65ec.04e07d23.js b/assets/js/c16f65ec.04e07d23.js new file mode 100644 index 0000000..ee7c0da --- /dev/null +++ b/assets/js/c16f65ec.04e07d23.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2798],{3905:(a,e,n)=>{n.d(e,{Zo:()=>k,kt:()=>m});var t=n(7294);function i(a,e,n){return e in a?Object.defineProperty(a,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):a[e]=n,a}function o(a,e){var n=Object.keys(a);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(a);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),n.push.apply(n,t)}return n}function r(a){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?o(Object(n),!0).forEach((function(e){i(a,e,n[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(n,e))}))}return a}function s(a,e){if(null==a)return{};var n,t,i=function(a,e){if(null==a)return{};var n,t,i={},o=Object.keys(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||(i[n]=a[n]);return i}(a,e);if(Object.getOwnPropertySymbols){var 
o=Object.getOwnPropertySymbols(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(a,n)&&(i[n]=a[n])}return i}var p=t.createContext({}),l=function(a){var e=t.useContext(p),n=e;return a&&(n="function"==typeof a?a(e):r(r({},e),a)),n},k=function(a){var e=l(a.components);return t.createElement(p.Provider,{value:e},a.children)},c="mdxType",d={inlineCode:"code",wrapper:function(a){var e=a.children;return t.createElement(t.Fragment,{},e)}},f=t.forwardRef((function(a,e){var n=a.components,i=a.mdxType,o=a.originalType,p=a.parentName,k=s(a,["components","mdxType","originalType","parentName"]),c=l(n),f=i,m=c["".concat(p,".").concat(f)]||c[f]||d[f]||o;return n?t.createElement(m,r(r({ref:e},k),{},{components:n})):t.createElement(m,r({ref:e},k))}));function m(a,e){var n=arguments,i=e&&e.mdxType;if("string"==typeof a||i){var o=n.length,r=new Array(o);r[0]=f;var s={};for(var p in e)hasOwnProperty.call(e,p)&&(s[p]=e[p]);s.originalType=a,s[c]="string"==typeof a?a:i,r[1]=s;for(var l=2;l<o;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},9942:(a,e,n)=>{n.r(e),n.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var t=n(7462),i=(n(7294),n(3905));const o={},r="Benchmarking FastKafka app",s={unversionedId:"guides/Guide_06_Benchmarking_FastKafka",id:"version-0.7.1/guides/Guide_06_Benchmarking_FastKafka",title:"Benchmarking FastKafka app",description:"Prerequisites",source:"@site/versioned_docs/version-0.7.1/guides/Guide_06_Benchmarking_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_06_Benchmarking_FastKafka",permalink:"/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using FastAPI to Run FastKafka 
Application",permalink:"/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application"},next:{title:"EventMetadata",permalink:"/docs/0.7.1/api/fastkafka/EventMetadata"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Starting Kafka",id:"starting-kafka",level:3},{value:"Installing Java and Kafka",id:"installing-java-and-kafka",level:4},{value:"Creating configuration for Zookeeper and Kafka",id:"creating-configuration-for-zookeeper-and-kafka",level:4},{value:"Starting Zookeeper and Kafka",id:"starting-zookeeper-and-kafka",level:4},{value:"Creating topics in Kafka",id:"creating-topics-in-kafka",level:4},{value:"Populating topics with dummy data",id:"populating-topics-with-dummy-data",level:4},{value:"Benchmarking FastKafka",id:"benchmarking-fastkafka",level:3}],k={toc:l},c="wrapper";function d(a){let{components:e,...n}=a;return(0,i.kt)(c,(0,t.Z)({},k,n,{components:e,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"benchmarking-fastkafka-app"},"Benchmarking FastKafka app"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"To benchmark a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nproject, you will need the following:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library built with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"."),(0,i.kt)("li",{parentName:"ol"},"A running ",(0,i.kt)("inlineCode",{parentName:"li"},"Kafka")," instance to benchmark the FastKafka application\nagainst.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and 
write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = 
ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nhas a decorator for benchmarking which is appropriately called as\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark"),". Let\u2019s edit our ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and add the\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," decorator to the consumes method."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal 
width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\n@kafka_app.benchmark(interval=1, sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"Here we are conducting a benchmark of a function that consumes data from\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic with an interval of 1 second and a sliding window\nsize of 5."),(0,i.kt)("p",null,"This ",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," method uses the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," parameter to calculate the\nresults over a specific time period, and the ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size"),"\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation."),(0,i.kt)("p",null,"This benchmark is important to ensure that the function is performing\noptimally and to identify any areas for 
improvement."),(0,i.kt)("h3",{id:"starting-kafka"},"Starting Kafka"),(0,i.kt)("p",null,"If you already have a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," running somewhere, then you can skip this\nstep."),(0,i.kt)("p",null,"Please keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself."),(0,i.kt)("h4",{id:"installing-java-and-kafka"},"Installing Java and Kafka"),(0,i.kt)("p",null,"We need a working ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),"instance to benchmark our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, and to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," we need ",(0,i.kt)("inlineCode",{parentName:"p"},"Java"),". Thankfully,\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\ncomes with a CLI to install both ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," on our machine."),(0,i.kt)("p",null,"So, let\u2019s install ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," by executing the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka testing install_deps\n")),(0,i.kt)("p",null,"The above command will extract ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),' scripts at the location\n\u201c\\$HOME/.local/kafka_2.13-3.3.2" on your machine.'),(0,i.kt)("h4",{id:"creating-configuration-for-zookeeper-and-kafka"},"Creating configuration for Zookeeper and Kafka"),(0,i.kt)("p",null,"Now we need to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," separately, and to start\nthem we need 
",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," files."),(0,i.kt)("p",null,"Let\u2019s create a folder inside the folder where ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," scripts were\nextracted and change directory into it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n")),(0,i.kt)("p",null,"Let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n")),(0,i.kt)("p",null,"Similarly, let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"broker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n")),(0,i.kt)("h4",{id:"starting-zookeeper-and-kafka"},"Starting Zookeeper and Kafka"),(0,i.kt)("p",null,"We need two different terminals to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," in one and 
",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," in\nanother. Let\u2019s open a new terminal and run the following commands to\nstart ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n")),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," is up and running, open a new terminal and execute the\nfollwing commands to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n")),(0,i.kt)("p",null,"Now we have both ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," up and running."),(0,i.kt)("h4",{id:"creating-topics-in-kafka"},"Creating topics in Kafka"),(0,i.kt)("p",null,"In a new terminal, please execute the following command to create\nnecessary topics in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n")),(0,i.kt)("h4",{id:"populating-topics-with-dummy-data"},"Populating topics with dummy data"),(0,i.kt)("p",null,"To benchmark our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, we need some data in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," 
topics."),(0,i.kt)("p",null,"In the same terminal, let\u2019s create some dummy data:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},'yes \'{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}\' | head -n 1000000 > /tmp/test_data\n')),(0,i.kt)("p",null,"This command will create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"test_data")," in the ",(0,i.kt)("inlineCode",{parentName:"p"},"tmp")," folder\nwith one million rows of text. This will act as dummy data to populate\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic."),(0,i.kt)("p",null,"Let\u2019s populate the created topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," with the dummy data which\nwe created above:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n")),(0,i.kt)("p",null,"Now our topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again."),(0,i.kt)("h3",{id:"benchmarking-fastkafka"},"Benchmarking FastKafka"),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," are ready, benchmarking\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is as simple as running the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n")),(0,i.kt)("p",null,"This command will start the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp and begin consuming messages from ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),", which we spun up earlier.\nAdditionally, the same command will output all of the benchmark\nthroughputs based on the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size")," values."),(0,i.kt)("p",null,"The output for the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 
23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 
23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n")),(0,i.kt)("p",null,"Based on the output, when using 1 worker, our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.7.1/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp achieved a ",(0,i.kt)("inlineCode",{parentName:"p"},"throughput")," of 
93k messages per second and an\n",(0,i.kt)("inlineCode",{parentName:"p"},"average throughput")," of 93k messages per second."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c192c597.2da137e0.js b/assets/js/c192c597.2da137e0.js new file mode 100644 index 0000000..1e419f8 --- /dev/null +++ b/assets/js/c192c597.2da137e0.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1984],{3905:(e,t,a)=>{a.d(t,{Zo:()=>p,kt:()=>m});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),u=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},h="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var 
a=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),h=u(a),c=o,m=h["".concat(s,".").concat(c)]||h[c]||d[c]||i;return a?n.createElement(m,r(r({ref:t},p),{},{components:a})):n.createElement(m,r({ref:t},p))}));function m(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=a.length,r=new Array(i);r[0]=c;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[h]="string"==typeof e?e:o,r[1]=l;for(var u=2;u<i;u++)r[u]=a[u];return n.createElement.apply(null,r)}return n.createElement.apply(null,a)}c.displayName="MDXCreateElement"},6649:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var n=a(7462),o=(a(7294),a(3905));const i={},r="Contributing to FastKafka",l={unversionedId:"CONTRIBUTING",id:"version-0.8.0/CONTRIBUTING",title:"Contributing to FastKafka",description:"First off, thanks for taking the time to contribute! \u2764\ufe0f",source:"@site/versioned_docs/version-0.8.0/CONTRIBUTING.md",sourceDirName:".",slug:"/CONTRIBUTING",permalink:"/docs/CONTRIBUTING",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LICENSE",permalink:"/docs/LICENSE"},next:{title:"Release notes",permalink:"/docs/CHANGELOG"}},s={},u=[{value:"Table of Contents",id:"table-of-contents",level:2},{value:"I Have a Question",id:"i-have-a-question",level:2},{value:"I Want To Contribute",id:"i-want-to-contribute",level:2},{value:"Reporting Bugs",id:"reporting-bugs",level:3},{value:"Before Submitting a Bug Report",id:"before-submitting-a-bug-report",level:4},{value:"How Do I Submit a Good Bug Report?",id:"how-do-i-submit-a-good-bug-report",level:4},{value:"Suggesting Enhancements",id:"suggesting-enhancements",level:3},{value:"Before Submitting an Enhancement",id:"before-submitting-an-enhancement",level:4},{value:"How Do I Submit a Good Enhancement 
Suggestion?",id:"how-do-i-submit-a-good-enhancement-suggestion",level:4},{value:"Your First Code Contribution",id:"your-first-code-contribution",level:3},{value:"Development",id:"development",level:2},{value:"Prepare the dev environment",id:"prepare-the-dev-environment",level:3},{value:"Clone the FastKafka repository",id:"clone-the-fastkafka-repository",level:4},{value:"Optional: create a virtual python environment",id:"optional-create-a-virtual-python-environment",level:4},{value:"Install FastKafka",id:"install-fastkafka",level:4},{value:"Install JRE and Kafka toolkit",id:"install-jre-and-kafka-toolkit",level:4},{value:"Install npm",id:"install-npm",level:4},{value:"Install docusaurus",id:"install-docusaurus",level:4},{value:"Check if everything works",id:"check-if-everything-works",level:4},{value:"Way of working",id:"way-of-working",level:3},{value:"Before a PR",id:"before-a-pr",level:3},{value:"Attribution",id:"attribution",level:2}],p={toc:u},h="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(h,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"contributing-to-fastkafka"},"Contributing to FastKafka"),(0,o.kt)("p",null,"First off, thanks for taking the time to contribute! \u2764\ufe0f"),(0,o.kt)("p",null,"All types of contributions are encouraged and valued. See the ",(0,o.kt)("a",{parentName:"p",href:"#table-of-contents"},"Table of Contents")," for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. \ud83c\udf89"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"And if you like the project, but just don't have time to contribute, that's fine. 
There are other easy ways to support the project and show your appreciation, which we would also be very happy about:"),(0,o.kt)("ul",{parentName:"blockquote"},(0,o.kt)("li",{parentName:"ul"},"Star the project"),(0,o.kt)("li",{parentName:"ul"},"Tweet about it"),(0,o.kt)("li",{parentName:"ul"},"Refer this project in your project's readme"),(0,o.kt)("li",{parentName:"ul"},"Mention the project at local meetups and tell your friends/colleagues"))),(0,o.kt)("h2",{id:"table-of-contents"},"Table of Contents"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"I Have a Question")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#reporting-bugs"},"Reporting Bugs")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#suggesting-enhancements"},"Suggesting Enhancements")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#your-first-code-contribution"},"Your First Code Contribution")))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#development"},"Development"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#prepare-the-dev-environment"},"Prepare the dev environment")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#way-of-working"},"Way of working")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"#before-a-pr"},"Before a PR"))))),(0,o.kt)("h2",{id:"i-have-a-question"},"I Have a Question"),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"If you want to ask a question, we assume that you have read the available ",(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/docs"},"Documentation"),".")),(0,o.kt)("p",null,"Before you ask a question, it is best to search for existing 
",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"Issues")," that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue."),(0,o.kt)("p",null,"If you then still feel the need to ask a question and need clarification, we recommend the following:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord")),(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Provide as much context as you can about what you're running into")))),(0,o.kt)("p",null,"We will then take care of the issue as soon as possible."),(0,o.kt)("h2",{id:"i-want-to-contribute"},"I Want To Contribute"),(0,o.kt)("blockquote",null,(0,o.kt)("h3",{parentName:"blockquote",id:"legal-notice"},"Legal Notice"),(0,o.kt)("p",{parentName:"blockquote"},"When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.")),(0,o.kt)("h3",{id:"reporting-bugs"},"Reporting Bugs"),(0,o.kt)("h4",{id:"before-submitting-a-bug-report"},"Before Submitting a Bug Report"),(0,o.kt)("p",null,"A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Determine if your bug is really a bug and not an error on your side e.g. 
using incompatible environment components/versions (Make sure that you have read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation"),". If you are looking for support, you might want to check ",(0,o.kt)("a",{parentName:"li",href:"#i-have-a-question"},"this section"),")."),(0,o.kt)("li",{parentName:"ul"},"To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues?q=label%3Abug"},"bug tracker"),"."),(0,o.kt)("li",{parentName:"ul"},"Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue."),(0,o.kt)("li",{parentName:"ul"},"Collect information about the bug:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Stack trace (Traceback)"),(0,o.kt)("li",{parentName:"ul"},"OS, Platform and Version (Windows, Linux, macOS, x86, ARM)"),(0,o.kt)("li",{parentName:"ul"},"Python version"),(0,o.kt)("li",{parentName:"ul"},"Possibly your input and the output"),(0,o.kt)("li",{parentName:"ul"},"Can you reliably reproduce the issue? And can you also reproduce it with older versions?")))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-bug-report"},"How Do I Submit a Good Bug Report?"),(0,o.kt)("p",null,"We use GitHub issues to track bugs and errors. If you run into an issue with the project:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Open an ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/new"},"Issue"),". 
(Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)"),(0,o.kt)("li",{parentName:"ul"},"Explain the behavior you would expect and the actual behavior."),(0,o.kt)("li",{parentName:"ul"},"Please provide as much context as possible and describe the ",(0,o.kt)("em",{parentName:"li"},"reproduction steps")," that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case."),(0,o.kt)("li",{parentName:"ul"},"Provide the information you collected in the previous section.")),(0,o.kt)("p",null,"Once it's filed:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"The project team will label the issue accordingly."),(0,o.kt)("li",{parentName:"ul"},"A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro"),". Bugs with the ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-repro")," tag will not be addressed until they are reproduced."),(0,o.kt)("li",{parentName:"ul"},"If the team is able to reproduce the issue, it will be marked ",(0,o.kt)("inlineCode",{parentName:"li"},"needs-fix"),", as well as possibly other tags (such as ",(0,o.kt)("inlineCode",{parentName:"li"},"critical"),"), and the issue will be left to be implemented.")),(0,o.kt)("h3",{id:"suggesting-enhancements"},"Suggesting Enhancements"),(0,o.kt)("p",null,"This section guides you through submitting an enhancement suggestion for FastKafka, ",(0,o.kt)("strong",{parentName:"p"},"including completely new features and minor improvements to existing functionality"),". 
Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions."),(0,o.kt)("h4",{id:"before-submitting-an-enhancement"},"Before Submitting an Enhancement"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Make sure that you are using the latest version."),(0,o.kt)("li",{parentName:"ul"},"Read the ",(0,o.kt)("a",{parentName:"li",href:"https://fastkafka.airt.ai/docs"},"documentation")," carefully and find out if the functionality is already covered, maybe by an individual configuration."),(0,o.kt)("li",{parentName:"ul"},"Perform a ",(0,o.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues"},"search")," to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one."),(0,o.kt)("li",{parentName:"ul"},"Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. 
If you're just targeting a minority of users, consider writing an add-on/plugin library."),(0,o.kt)("li",{parentName:"ul"},"If you are not sure or would like to discuiss the enhancement with us directly, you can always contact us on ",(0,o.kt)("a",{parentName:"li",href:"https://discord.com/invite/CJWmYpyFbc"},"Discord"))),(0,o.kt)("h4",{id:"how-do-i-submit-a-good-enhancement-suggestion"},"How Do I Submit a Good Enhancement Suggestion?"),(0,o.kt)("p",null,"Enhancement suggestions are tracked as ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues"},"GitHub issues"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Use a ",(0,o.kt)("strong",{parentName:"li"},"clear and descriptive title")," for the issue to identify the suggestion."),(0,o.kt)("li",{parentName:"ul"},"Provide a ",(0,o.kt)("strong",{parentName:"li"},"step-by-step description of the suggested enhancement")," in as many details as possible."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Describe the current behavior")," and ",(0,o.kt)("strong",{parentName:"li"},"explain which behavior you expected to see instead")," and why. At this point you can also tell which alternatives do not work for you."),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("strong",{parentName:"li"},"Explain why this enhancement would be useful")," to most FastKafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.")),(0,o.kt)("h3",{id:"your-first-code-contribution"},"Your First Code Contribution"),(0,o.kt)("p",null,'A great way to start contributing to FastKafka would be by solving an issue tagged with "good first issue". 
To find a list of issues that are tagged as "good first issue" and are suitable for newcomers, please visit the following link: ',(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/labels/good%20first%20issue"},"Good first issues")),(0,o.kt)("p",null,"These issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in ",(0,o.kt)("a",{parentName:"p",href:"#way-of-working"},"Way of working")," and ",(0,o.kt)("a",{parentName:"p",href:"#before-a-pr"},"Before a PR"),", and help us make FastKafka even better!"),(0,o.kt)("p",null,"If you have any questions or need further assistance, feel free to reach out to us. Happy coding!"),(0,o.kt)("h2",{id:"development"},"Development"),(0,o.kt)("h3",{id:"prepare-the-dev-environment"},"Prepare the dev environment"),(0,o.kt)("p",null,"To start contributing to FastKafka, you first have to prepare the development environment."),(0,o.kt)("h4",{id:"clone-the-fastkafka-repository"},"Clone the FastKafka repository"),(0,o.kt)("p",null,"To clone the repository, run the following command in the CLI:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"git clone https://github.com/airtai/fastkafka.git\n")),(0,o.kt)("h4",{id:"optional-create-a-virtual-python-environment"},"Optional: create a virtual python environment"),(0,o.kt)("p",null,"To prevent library version clashes with you other projects, it is reccomended that you create a virtual python environment for your FastKafka project by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"python3 -m venv fastkafka-env\n")),(0,o.kt)("p",null,"And to activate your virtual environment run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"source fastkafka-env/bin/activate\n")),(0,o.kt)("p",null,"To learn more about virtual environments, please 
have a look at ",(0,o.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#:~:text=A%20virtual%20environment%20is%20created,the%20virtual%20environment%20are%20available."},"official python documentation")),(0,o.kt)("h4",{id:"install-fastkafka"},"Install FastKafka"),(0,o.kt)("p",null,"To install FastKafka, navigate to the root directory of the cloned FastKafka project and run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'pip install fastkafka -e [."dev"]\n')),(0,o.kt)("h4",{id:"install-jre-and-kafka-toolkit"},"Install JRE and Kafka toolkit"),(0,o.kt)("p",null,"To be able to run tests and use all the functionalities of FastKafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka testing install-deps")," CLI command which will install JRE and Kafka toolkit for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install JRE and Kafka manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.oracle.com/javase/9/install/toc.htm"},"JDK and JRE installation guide")," and ",(0,o.kt)("a",{parentName:"li",href:"https://kafka.apache.org/quickstart"},"Apache Kafka quickstart"))),(0,o.kt)("h4",{id:"install-npm"},"Install npm"),(0,o.kt)("p",null,"To be able to run tests you must have npm installed, because of documentation generation. 
To do this, you have two options:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Use our ",(0,o.kt)("inlineCode",{parentName:"li"},"fastkafka docs install_deps")," CLI command which will install npm for you in your .local folder\nOR"),(0,o.kt)("li",{parentName:"ol"},"Install npm manually.\nTo do this, please refer to ",(0,o.kt)("a",{parentName:"li",href:"https://docs.npmjs.com/downloading-and-installing-node-js-and-npm"},"NPM installation guide"))),(0,o.kt)("h4",{id:"install-docusaurus"},"Install docusaurus"),(0,o.kt)("p",null,"To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of FastKafka project."),(0,o.kt)("h4",{id:"check-if-everything-works"},"Check if everything works"),(0,o.kt)("p",null,"After installing FastKafka and all the necessary dependencies, run ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev_test"),' in the root of FastKafka project. This will take a couple of minutes as it will run all the tests on FastKafka project. If everythng is setup correctly, you will get a "Success." message in your terminal, otherwise please refer to previous steps.'),(0,o.kt)("h3",{id:"way-of-working"},"Way of working"),(0,o.kt)("p",null,"The development of FastKafka is done in Jupyter notebooks. 
Inside the ",(0,o.kt)("inlineCode",{parentName:"p"},"nbs")," directory you will find all the source code of FastKafka, this is where you will implement your changes."),(0,o.kt)("p",null,"The testing, cleanup and exporting of the code is being handled by ",(0,o.kt)("inlineCode",{parentName:"p"},"nbdev"),", please, before starting the work on FastKafka, get familiar with it by reading ",(0,o.kt)("a",{parentName:"p",href:"https://nbdev.fast.ai/getting_started.html"},"nbdev documentation"),"."),(0,o.kt)("p",null,"The general philosopy you should follow when writing code for FastKafka is:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Function should be an atomic functionality, short and concise",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Good rule of thumb: your function should be 5-10 lines long usually"))),(0,o.kt)("li",{parentName:"ul"},"If there are more than 2 params, enforce keywording using *",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"E.g.: ",(0,o.kt)("inlineCode",{parentName:"li"},"def function(param1, *, param2, param3): ...")))),(0,o.kt)("li",{parentName:"ul"},"Define typing of arguments and return value",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected"))),(0,o.kt)("li",{parentName:"ul"},"After the function cell, write test cells using the assert keyword",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Whenever you implement something you should test that functionality immediately in the cells below "))),(0,o.kt)("li",{parentName:"ul"},"Add Google style python docstrings when function is implemented and tested")),(0,o.kt)("h3",{id:"before-a-pr"},"Before a PR"),(0,o.kt)("p",null,"After you have implemented your changes you will want to open a pull request to merge those changes into our main branch. 
To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of FastKafka project):"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Format your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbqa black nbs")),(0,o.kt)("li",{parentName:"ol"},"Close, shutdown, and clean the metadata from your notebooks: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_clean")),(0,o.kt)("li",{parentName:"ol"},"Export your code: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_export")),(0,o.kt)("li",{parentName:"ol"},"Run the tests: ",(0,o.kt)("inlineCode",{parentName:"li"},"nbdev_test")),(0,o.kt)("li",{parentName:"ol"},"Test code typing: ",(0,o.kt)("inlineCode",{parentName:"li"},"mypy fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with bandit: ",(0,o.kt)("inlineCode",{parentName:"li"},"bandit -r fastkafka")),(0,o.kt)("li",{parentName:"ol"},"Test code safety with semgrep: ",(0,o.kt)("inlineCode",{parentName:"li"},"semgrep --config auto -r fastkafka"))),(0,o.kt)("p",null,"When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everythng is in order, we will approve your merge."),(0,o.kt)("h2",{id:"attribution"},"Attribution"),(0,o.kt)("p",null,"This guide is based on the ",(0,o.kt)("strong",{parentName:"p"},"contributing-gen"),". 
",(0,o.kt)("a",{parentName:"p",href:"https://github.com/bttger/contributing-gen"},"Make your own"),"!"))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c248ee7e.56274b20.js b/assets/js/c248ee7e.56274b20.js new file mode 100644 index 0000000..01aaae9 --- /dev/null +++ b/assets/js/c248ee7e.56274b20.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8927],{3905:(t,e,a)=>{a.d(e,{Zo:()=>p,kt:()=>c});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function i(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function l(t){for(var e=1;e<arguments.length;e++){var a=null!=arguments[e]?arguments[e]:{};e%2?i(Object(a),!0).forEach((function(e){r(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function o(t,e){if(null==t)return{};var a,n,r=function(t,e){if(null==t)return{};var a,n,r={},i=Object.keys(t);for(n=0;n<i.length;n++)a=i[n],e.indexOf(a)>=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(t);for(n=0;n<i.length;n++)a=i[n],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var d=n.createContext({}),s=function(t){var e=n.useContext(d),a=e;return t&&(a="function"==typeof t?t(e):l(l({},e),t)),a},p=function(t){var e=s(t.components);return n.createElement(d.Provider,{value:e},t.children)},m="mdxType",k={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},u=n.forwardRef((function(t,e){var 
a=t.components,r=t.mdxType,i=t.originalType,d=t.parentName,p=o(t,["components","mdxType","originalType","parentName"]),m=s(a),u=r,c=m["".concat(d,".").concat(u)]||m[u]||k[u]||i;return a?n.createElement(c,l(l({ref:e},p),{},{components:a})):n.createElement(c,l({ref:e},p))}));function c(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var i=a.length,l=new Array(i);l[0]=u;var o={};for(var d in e)hasOwnProperty.call(e,d)&&(o[d]=e[d]);o.originalType=t,o[m]="string"==typeof t?t:r,l[1]=o;for(var s=2;s<i;s++)l[s]=a[s];return n.createElement.apply(null,l)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},7872:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>d,contentTitle:()=>l,default:()=>k,frontMatter:()=>i,metadata:()=>o,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const i={},l=void 0,o={unversionedId:"api/fastkafka/testing/Tester",id:"version-0.8.0/api/fastkafka/testing/Tester",title:"Tester",description:"init {fastkafka._application.tester.Tester.init}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/testing/Tester.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/Tester",permalink:"/docs/api/fastkafka/testing/Tester",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LocalRedpandaBroker",permalink:"/docs/api/fastkafka/testing/LocalRedpandaBroker"},next:{title:"fastkafka",permalink:"/docs/cli/fastkafka"}},d={},s=[{value:"<strong>init</strong>",id:"fastkafka._application.tester.Tester.init",level:3},{value:"benchmark",id:"fastkafka._application.app.FastKafka.benchmark",level:3},{value:"consumes",id:"fastkafka._application.app.FastKafka.consumes",level:3},{value:"create_docs",id:"fastkafka._application.app.FastKafka.create_docs",level:3},{value:"create_mocks",id:"fastkafka._application.app.FastKafka.create_mocks",level:3},{value:"fastapi_lifespan",id:"fastkafka._application.app.FastKafka.fastapi_lifespan",level:3},{value:"get_topics",id:"fastkafka._application.app.FastKafka.get_topic
s",level:3},{value:"is_started",id:"fastkafka._application.app.FastKafka.is_started",level:3},{value:"produces",id:"fastkafka._application.app.FastKafka.produces",level:3},{value:"run_in_background",id:"fastkafka._application.app.FastKafka.run_in_background",level:3},{value:"set_kafka_broker",id:"fastkafka._application.app.FastKafka.set_kafka_broker",level:3}],p={toc:s},m="wrapper";function k(t){let{components:e,...a}=t;return(0,r.kt)(m,(0,n.Z)({},p,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h3",{id:"fastkafka._application.tester.Tester.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/tester.py#L51-L77",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self, app, use_in_memory_broker=True\n)\n")),(0,r.kt)("p",null,"Mirror-like object for testing a FastKafka application"),(0,r.kt)("p",null,"Can be used as context manager"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"app")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[fastkafka.FastKafka, List[fastkafka.FastKafka]]")),(0,r.kt)("td",{parentName:"tr",align:null},"The FastKafka application to be 
tested."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"use_in_memory_broker")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"Whether to use an in-memory broker for testing or not."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.benchmark"},"benchmark"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1108-L1159",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"benchmark(\n self, interval=1, sliding_window_size=None\n)\n")),(0,r.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"interval")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[int, datetime.timedelta]")),(0,r.kt)("td",{parentName:"tr",align:null},"Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. 
default: 1 - one second"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sliding_window_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[int]")),(0,r.kt)("td",{parentName:"tr",align:null},"The size of the sliding window to use to calculateaverage throughput. default: None - By default average throughput isnot calculated"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.consumes"},"consumes"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L474-L557",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"consumes(\n self,\n topic=None,\n decoder='json',\n executor=None,\n brokers=None,\n prefix='on_',\n description=None,\n loop=None,\n bootstrap_servers='localhost',\n client_id='aiokafka-0.8.1',\n group_id=None,\n key_deserializer=None,\n value_deserializer=None,\n fetch_max_wait_ms=500,\n fetch_max_bytes=52428800,\n fetch_min_bytes=1,\n max_partition_fetch_bytes=1048576,\n request_timeout_ms=40000,\n retry_backoff_ms=100,\n auto_offset_reset='latest',\n enable_auto_commit=True,\n auto_commit_interval_ms=5000,\n check_crcs=True,\n metadata_max_age_ms=300000,\n partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),\n max_poll_interval_ms=300000,\n rebalance_timeout_ms=None,\n session_timeout_ms=10000,\n heartbeat_interval_ms=3000,\n consumer_timeout_ms=200,\n max_poll_records=None,\n ssl_context=None,\n security_protocol='PLAINTEXT',\n api_version='auto',\n exclude_internal_topics=True,\n connections_max_idle_ms=540000,\n isolation_level='read_uncommitted',\n 
sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n)\n")),(0,r.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,r.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Kafka topic that the consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefix"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"decoder")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. 
It also accepts custom decoder function."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'json'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"executor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},'Type of executor to choose for consuming tasks. Avaliable optionsare "SequentialExecutor" and "DynamicTaskExecutor". The default option is"SequentialExecutor" which will execute the consuming tasks sequentially.If the consuming tasks have high latency it is recommended to use"DynamicTaskExecutor" which will wrap the consuming functions into tasksand run them in on asyncio loop in background. This comes with a cost ofincreased overhead so use it only in cases when your consume functions havehigh latency such as database queries or some other type of networking.'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"prefix")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'Prefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: "on_". 
If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'on_'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional description of the consuming function async docs.If not provided, consuming function ",(0,r.kt)("strong",{parentName:"td"},"doc")," attr will be used."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a ",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," string (or list of",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. 
If no servers arespecified, will default to ",(0,r.kt)("inlineCode",{parentName:"td"},"localhost:9092"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'localhost'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~.consumer.group_coordinator.GroupCoordinator"),"for logging with respect to consumer group administration. Default:",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-{version}")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'aiokafka-0.8.1'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"group_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"name of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. 
If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message key and returns a deserialized key."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_deserializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Any callable that takes araw message value and returns a deserialized value."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_min_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Minimum amount of data the server shouldreturn for a fetch request, otherwise wait up to",(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of data the server shouldreturn for a fetch request. 
This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"52428800"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"fetch_max_wait_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"500"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_partition_fetch_bytes")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum amount of dataper-partition the server will return. The maximum total memoryused for a request ",(0,r.kt)("inlineCode",{parentName:"td"},"= #partitions * max_partition_fetch_bytes"),".This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. 
Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_records")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum number of records returned in asingle call to :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),". Defaults ",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", no limit."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client request timeout in milliseconds.Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. 
Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_offset_reset")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A policy for resetting offsets on:exc:",(0,r.kt)("inlineCode",{parentName:"td"},".OffsetOutOfRangeError")," errors: ",(0,r.kt)("inlineCode",{parentName:"td"},"earliest")," will move to the oldestavailable message, ",(0,r.kt)("inlineCode",{parentName:"td"},"latest")," will move to the most recent, and",(0,r.kt)("inlineCode",{parentName:"td"},"none")," will raise an exception so you can handle this case.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"latest"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'latest'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_auto_commit")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"If true the consumer's offset will beperiodically committed in the background. Default: True."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"auto_commit_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"milliseconds between automaticoffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"5000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"check_crcs")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Automatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partition_assignment_strategy")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"List of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. 
Default: ","[:class:",(0,r.kt)("inlineCode",{parentName:"td"},".RoundRobinPartitionAssignor"),"]"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_poll_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum allowed time between calls toconsume messages (e.g., :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany"),"). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See ",(0,r.kt)("inlineCode",{parentName:"td"},"KIP-62"),"_ for moreinformation. Default 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"rebalance_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to ",(0,r.kt)("inlineCode",{parentName:"td"},"max.poll.interval.ms")," configuration,but as ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka")," will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:",(0,r.kt)("inlineCode",{parentName:"td"},".ConsumerRebalanceListener")," to delay rebalacing. 
Defaultsto ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Client group session and failure detectiontimeout. The consumer sends periodic heartbeats(",(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.If no hearts are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe ",(0,r.kt)("strong",{parentName:"td"},"broker")," configuration properties",(0,r.kt)("inlineCode",{parentName:"td"},"group.min.session.timeout.ms")," and ",(0,r.kt)("inlineCode",{parentName:"td"},"group.max.session.timeout.ms"),".Default: 10000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"10000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"heartbeat_interval_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than ",(0,r.kt)("inlineCode",{parentName:"td"},"session_timeout_ms"),", but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. 
Default: 3000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"3000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"consumer_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"maximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"200"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.:class:",(0,r.kt)("inlineCode",{parentName:"td"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For more information see:ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),". Default: None."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"exclude_internal_topics")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Whether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: True"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"True"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying ",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"isolation_level")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Controls how to read messages writtentransactionally.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed"),", :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returntransactional messages which have been committed.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")," (the default), :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, in",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," mode, :meth:",(0,r.kt)("inlineCode",{parentName:"td"},".getmany")," will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in ",(0,r.kt)("inlineCode",{parentName:"td"},"read_committed")," the seek_to_end method willreturn the LSO. See method docs below. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"read_uncommitted")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'read_uncommitted'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Valid values are:",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: 
None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_oauth_token_provider")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"OAuthBearer token provider instance. (See :mod:",(0,r.kt)("inlineCode",{parentName:"td"},"kafka.oauth.abstract"),").Default: None"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], 
None]]]")),(0,r.kt)("td",{parentName:"tr",align:null},": A function returning the same function")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.create_docs"},"create_docs"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L938-L964",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"create_docs(\n self\n)\n")),(0,r.kt)("p",null,"Create the asyncapi documentation based on the configured consumers and producers."),(0,r.kt)("p",null,"This function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers."),(0,r.kt)("p",null,"Note:\nThe asyncapi documentation is saved to the location specified by the ",(0,r.kt)("inlineCode",{parentName:"p"},"_asyncapi_path"),"\nattribute of the FastKafka instance."),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.create_mocks"},"create_mocks"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1026-L1104",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"create_mocks(\n self\n)\n")),(0,r.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.fastapi_lifespan"},"fastapi_lifespan"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L1163-L1182",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"fastapi_lifespan(\n self, kafka_broker_name\n)\n")),(0,r.kt)("p",null,"Method for managing the lifespan of a FastAPI application with a specific 
Kafka broker."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_broker_name")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The name of the Kafka broker to start FastKafka"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Lifespan function to use for initializing FastAPI")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.get_topics"},"get_topics"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L663-L672",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"get_topics(\n self\n)\n")),(0,r.kt)("p",null,"Get all topics for both producing and 
consuming."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Iterable[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"A set of topics for both producing and consuming.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.is_started"},"is_started"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L308-L319",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"@property\nis_started(\n self\n)\n")),(0,r.kt)("p",null,"Property indicating whether the FastKafka object is started."),(0,r.kt)("p",null,"The is_started property indicates if the FastKafka object is currently\nin a started state. 
This implies that all background tasks, producers,\nand consumers have been initiated, and the object is successfully connected\nto the Kafka broker."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"True if the object is started, False otherwise.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.produces"},"produces"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L582-L659",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"produces(\n self,\n topic=None,\n encoder='json',\n prefix='to_',\n brokers=None,\n description=None,\n loop=None,\n bootstrap_servers='localhost',\n client_id=None,\n metadata_max_age_ms=300000,\n request_timeout_ms=40000,\n api_version='auto',\n acks=<object object at 0x7ff10d5f9100>,\n key_serializer=None,\n value_serializer=None,\n compression_type=None,\n max_batch_size=16384,\n partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>,\n max_request_size=1048576,\n linger_ms=0,\n send_backoff_ms=100,\n retry_backoff_ms=100,\n security_protocol='PLAINTEXT',\n ssl_context=None,\n connections_max_idle_ms=540000,\n enable_idempotence=False,\n transactional_id=None,\n transaction_timeout_ms=60000,\n sasl_mechanism='PLAIN',\n sasl_plain_password=None,\n sasl_plain_username=None,\n sasl_kerberos_service_name='kafka',\n sasl_kerberos_domain_name=None,\n sasl_oauth_token_provider=None,\n)\n")),(0,r.kt)("p",null,"Decorator registering the callback 
called when delivery report for a produced message is received"),(0,r.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topic")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"encoder")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[str, Callable[[pydantic.main.BaseModel], bytes]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. 
It also accepts custom encoder function."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'json'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"prefix")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'Prefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: "to_". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'to_'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"brokers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Union[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"description")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"Optional description of the producing function async docs.If not provided, producing function ",(0,r.kt)("strong",{parentName:"td"},"doc")," attr will be 
used."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bootstrap_servers")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a ",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," string or list of",(0,r.kt)("inlineCode",{parentName:"td"},"host[:port]")," strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list. It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to ",(0,r.kt)("inlineCode",{parentName:"td"},"localhost:9092"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'localhost'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"client_id")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"a name for this client. 
This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"aiokafka-producer-#")," (appended with a unique numberper instance)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied keys to bytesIf not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as ",(0,r.kt)("inlineCode",{parentName:"td"},"f(key),")," should return:class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"value_serializer")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"used to convert user-supplied messagevalues to :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),". 
If not :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", called as",(0,r.kt)("inlineCode",{parentName:"td"},"f(value)"),", should return :class:",(0,r.kt)("inlineCode",{parentName:"td"},"bytes"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"acks")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"one of ",(0,r.kt)("inlineCode",{parentName:"td"},"0"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common:",(0,r.kt)("em",{parentName:"td"}," ",(0,r.kt)("inlineCode",{parentName:"em"},"0"),": Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1.")," ",(0,r.kt)("inlineCode",{parentName:"td"},"1"),": The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),": The broker leader will wait for the full set of in-sync replicas to acknowledge the record. 
This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=1"),". If ",(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")," is:data:",(0,r.kt)("inlineCode",{parentName:"td"},"True")," defaults to ",(0,r.kt)("inlineCode",{parentName:"td"},"acks=all")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<object object at 0x7ff10d5f9100>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"compression_type")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The compression type for all data generated bythe producer. Valid values are ",(0,r.kt)("inlineCode",{parentName:"td"},"gzip"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"snappy"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"lz4"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"zstd"),"or :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),".Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). 
Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_batch_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum size of buffered data per partition.After this amount :meth:",(0,r.kt)("inlineCode",{parentName:"td"},"send")," coroutine will block until batch isdrained.Default: 16384"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"16384"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms"),", producer will wait ",(0,r.kt)("inlineCode",{parentName:"td"},"linger_ms - process_time"),".Default: 0 (i.e. no delay)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"0"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"partitioner")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Callable used to determine which partitioneach message is assigned to. 
Called (after key serialization):",(0,r.kt)("inlineCode",{parentName:"td"},"partitioner(key_bytes, all_partitions, available_partitions)"),".The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None"),", the message is delivered to a random partition(filtered to partitions with available leaders only, if possible)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_request_size")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1048576"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"metadata_max_age_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"The period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. 
Default: 300000"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"300000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"request_timeout_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Produce request timeout in milliseconds.As it's sent as part of:class:",(0,r.kt)("inlineCode",{parentName:"td"},"~kafka.protocol.produce.ProduceRequest")," (it's a blockingcall), maximum waiting time can be up to ",(0,r.kt)("inlineCode",{parentName:"td"},"2 *request_timeout_ms"),".Default: 40000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"40000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retry_backoff_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Milliseconds to backoff when retrying onerrors. Default: 100."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"api_version")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"specify which kafka API version to use.If set to ",(0,r.kt)("inlineCode",{parentName:"td"},"auto"),", will attempt to infer the broker version byprobing various APIs. 
Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"auto")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'auto'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"security_protocol")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Protocol used to communicate with brokers.Valid values are: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SSL"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT"),",",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAINTEXT"),"."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAINTEXT'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ssl_context")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"pre-configured :class:",(0,r.kt)("inlineCode",{parentName:"td"},"~ssl.SSLContext"),"for wrapping socket connections. Directly passed into asyncio's:meth:",(0,r.kt)("inlineCode",{parentName:"td"},"~asyncio.loop.create_connection"),". For moreinformation see :ref:",(0,r.kt)("inlineCode",{parentName:"td"},"ssl_auth"),".Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"connections_max_idle_ms")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Close idle connections after the numberof milliseconds specified by this config. Specifying :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")," willdisable idle checks. 
Default: 540000 (9 minutes)."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"540000"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"enable_idempotence")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"When set to :data:",(0,r.kt)("inlineCode",{parentName:"td"},"True"),", the producer willensure that exactly one copy of each message is written in thestream. If :data:",(0,r.kt)("inlineCode",{parentName:"td"},"False"),", producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to ",(0,r.kt)("inlineCode",{parentName:"td"},"all"),". If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:",(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")," will be thrown.New in version 0.5.0."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_mechanism")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Authentication mechanism when security_protocolis configured for ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_PLAINTEXT")," or ",(0,r.kt)("inlineCode",{parentName:"td"},"SASL_SSL"),". 
Valid valuesare: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"GSSAPI"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-256"),", ",(0,r.kt)("inlineCode",{parentName:"td"},"SCRAM-SHA-512"),",",(0,r.kt)("inlineCode",{parentName:"td"},"OAUTHBEARER"),".Default: ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'PLAIN'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_username")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"username for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"sasl_plain_password")),(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"password for SASL ",(0,r.kt)("inlineCode",{parentName:"td"},"PLAIN")," authentication.Default: :data:",(0,r.kt)("inlineCode",{parentName:"td"},"None")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], 
fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]")),(0,r.kt)("td",{parentName:"tr",align:null},": A function returning the same function")))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,r.kt)("td",{parentName:"tr",align:null},"when needed")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.run_in_background"},"run_in_background"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L676-L709",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"run_in_background(\n self\n)\n")),(0,r.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,r.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,r.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is 
triggered."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]")),(0,r.kt)("td",{parentName:"tr",align:null},"A decorator function that takes a background task as an input and stores it to be run in the backround.")))),(0,r.kt)("h3",{id:"fastkafka._application.app.FastKafka.set_kafka_broker"},"set_kafka_broker"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_application/app.py#L321-L337",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"set_kafka_broker(\n self, kafka_broker_name\n)\n")),(0,r.kt)("p",null,"Sets the Kafka broker to start FastKafka with"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"kafka_broker_name")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The name of the Kafka broker to start 
FastKafka"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"ValueError")),(0,r.kt)("td",{parentName:"tr",align:null},"If the provided kafka_broker_name is not found in dictionary of kafka_brokers")))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c377a04b.e32687a6.js b/assets/js/c377a04b.e32687a6.js new file mode 100644 index 0000000..c39758a --- /dev/null +++ b/assets/js/c377a04b.e32687a6.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6971],{3905:(a,e,t)=>{t.d(e,{Zo:()=>c,kt:()=>f});var n=t(7294);function o(a,e,t){return e in a?Object.defineProperty(a,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):a[e]=t,a}function s(a,e){var t=Object.keys(a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(a);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),t.push.apply(t,n)}return t}function r(a){for(var e=1;e<arguments.length;e++){var t=null!=arguments[e]?arguments[e]:{};e%2?s(Object(t),!0).forEach((function(e){o(a,e,t[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(t,e))}))}return a}function i(a,e){if(null==a)return{};var t,n,o=function(a,e){if(null==a)return{};var t,n,o={},s=Object.keys(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||(o[t]=a[t]);return o}(a,e);if(Object.getOwnPropertySymbols){var 
s=Object.getOwnPropertySymbols(a);for(n=0;n<s.length;n++)t=s[n],e.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(a,t)&&(o[t]=a[t])}return o}var p=n.createContext({}),l=function(a){var e=n.useContext(p),t=e;return a&&(t="function"==typeof a?a(e):r(r({},e),a)),t},c=function(a){var e=l(a.components);return n.createElement(p.Provider,{value:e},a.children)},k="mdxType",u={inlineCode:"code",wrapper:function(a){var e=a.children;return n.createElement(n.Fragment,{},e)}},d=n.forwardRef((function(a,e){var t=a.components,o=a.mdxType,s=a.originalType,p=a.parentName,c=i(a,["components","mdxType","originalType","parentName"]),k=l(t),d=o,f=k["".concat(p,".").concat(d)]||k[d]||u[d]||s;return t?n.createElement(f,r(r({ref:e},c),{},{components:t})):n.createElement(f,r({ref:e},c))}));function f(a,e){var t=arguments,o=e&&e.mdxType;if("string"==typeof a||o){var s=t.length,r=new Array(s);r[0]=d;var i={};for(var p in e)hasOwnProperty.call(e,p)&&(i[p]=e[p]);i.originalType=a,i[k]="string"==typeof a?a:o,r[1]=i;for(var l=2;l<s;l++)r[l]=t[l];return n.createElement.apply(null,r)}return n.createElement.apply(null,t)}d.displayName="MDXCreateElement"},1269:(a,e,t)=>{t.r(e),t.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>u,frontMatter:()=>s,metadata:()=>i,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},r="FastKafka",i={unversionedId:"index",id:"index",title:"FastKafka",description:"Effortless Kafka integration for your web services",source:"@site/docs/index.md",sourceDirName:".",slug:"/",permalink:"/docs/next/",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",next:{title:"@consumes basics",permalink:"/docs/next/guides/Guide_11_Consumes_Basics"}},p={},l=[{value:"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50",id:"-stay-in-touch-",level:4},{value:"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately 
\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d",id:"-we-were-busy-lately-",level:4},{value:"Install",id:"install",level:2},{value:"Tutorial",id:"tutorial",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2},{value:"License",id:"license",level:2}],c={toc:l},k="wrapper";function u(a){let{components:e,...t}=a;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:e,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka"},"FastKafka"),(0,o.kt)("b",null,"Effortless Kafka integration for your web services"),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/v/fastkafka.png",alt:"PyPI"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/dm/fastkafka.png",alt:"PyPI -\nDownloads"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/pyversions/fastkafka.png",alt:"PyPI - Python\nVersion"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml",alt:"GitHub Workflow\nStatus"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg",alt:"CodeQL"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg",alt:"Dependency\nReview"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/license/airtai/fastkafka.png",alt:"GitHub"})),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building 
asynchronous services that interact with\nKafka topics. Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-stay-in-touch-"},"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50"),(0,o.kt)("p",null,"Please show your support and stay in touch by:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"giving our ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/"},"GitHub repository")," a\nstar, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"joining our ",(0,o.kt)("a",{parentName:"p",href:"https://discord.gg/CJWmYpyFbc"},"Discord server"),"."))),(0,o.kt)("p",null,"Your support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. Thank you for your\nsupport!"),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-we-were-busy-lately-"},"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg",alt:"Activity",title:"Repobeats analytics image"})),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on Windows, macOS, Linux, and most Unix-style operating\nsystems. 
You can install base version of FastKafka with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("p",null,"To install FastKafka with testing features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test]\n")),(0,o.kt)("p",null,"To install FastKafka with asyncapi docs please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[docs]\n")),(0,o.kt)("p",null,"To install FastKafka with all the features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test,docs]\n")),(0,o.kt)("h2",{id:"tutorial"},"Tutorial"),(0,o.kt)("p",null,"You can start an interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/index.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open in Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"To demonstrate FastKafka simplicity of using ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"@consumes"),"\ndecorators, we will focus on a simple app."),(0,o.kt)("p",null,"The app will consume JSON messages containing positive floats from one\ntopic, log them, and then produce incremented values to another topic."),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines one ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," mesage class. This Class will model the\nconsumed and produced data in our app demo, it contains one\n",(0,o.kt)("inlineCode",{parentName:"p"},"NonNegativeFloat")," field ",(0,o.kt)("inlineCode",{parentName:"p"},"data"),' that will be logged and \u201cprocessed"\nbefore being produced to another topic.'),(0,o.kt)("p",null,"These message class will be used to parse and validate incoming data in\nKafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass Data(BaseModel):\n data: NonNegativeFloat = Field(\n ..., example=0.5, description="Float data example"\n )\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("p",null,"We will also import and create a logger so that we can log the incoming\ndata in our consuming function."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from logging import getLogger\nfrom fastkafka import FastKafka\n\nlogger = getLogger("Demo Kafka app")\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," message class. Specifying the type of the\nsingle argument is instructing the Pydantic to use ",(0,o.kt)("inlineCode",{parentName:"p"},"Data.parse_raw()"),"\non the consumed message before passing it to the user defined function\n",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_output_data"),' function,\nwhich specifies that this function should produce a message to the\n\u201coutput_data" Kafka topic whenever it is called. The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_output_data"),"\nfunction takes a single float argument ",(0,o.kt)("inlineCode",{parentName:"p"},"data"),". 
It it increments the\ndata returns it wrapped in a ",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," object. The framework will call\nthe ",(0,o.kt)("inlineCode",{parentName:"p"},'Data.json().encode("utf-8")')," function on the returned value and\nproduce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: Data):\n logger.info(f"Got data: {msg.data}")\n await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic="output_data")\nasync def to_output_data(data: float) -> Data:\n processed_data = Data(data=data+1.0)\n return processed_data\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts InMemory implementation of Kafka\nbroker."),(0,o.kt)("p",null,"The Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.testing import Tester\n\nmsg = Data(\n data=0.1,\n)\n\n# Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send Data message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with incremented data in output_data topic\n await tester.awaited_mocks.on_output_data.assert_awaited_with(\n Data(data=1.1), timeout=2\n )\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: 
InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] Demo Kafka app: Got data: 0.1\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a simple FastKafka application. 
The app will consume the\n",(0,o.kt)("inlineCode",{parentName:"p"},"Data")," from the ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic, log it and produce the incremented\ndata to ",(0,o.kt)("inlineCode",{parentName:"p"},"output_data")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purposes")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent Data message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed service has reacted to Data\nmessage"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin faskafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass Data(BaseModel):\n data: NonNegativeFloat = Field(\n ..., example=0.5, description="Float data example"\n )\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n 
kafka_brokers=kafka_brokers,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: Data):\n logger.info(f"Got data: {msg.data}")\n await to_output_data(msg.data)\n\n\n@kafka_app.produces(topic="output_data")\nasync def to_output_data(data: float) -> Data:\n processed_data = Data(data=data+1.0)\n return processed_data\n')),(0,o.kt)("p",null,"To run the service, use the FastKafka CLI command and pass the module\n(in this case, the file where the app implementation is located) and the\napp simbol to the command."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see the following output in your\ncommand line:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1504]: 23-05-31 11:36:45.956 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[1506]: 23-05-31 11:36:46.042 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n[1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]\n[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)\n[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]\n[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\nStarting process cleanup, this may take a few seconds...\n23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...\n23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...\n[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.\n23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"AsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs 
install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation programatically you just need to call the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.\n23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'\n23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /content/asyncapi/docs.\n")),(0,o.kt)("p",null,"This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all your\ndocumentation will be saved. You can check out the content of it with:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxr-xr-x 4 root root 4096 May 31 11:38 docs\ndrwxr-xr-x 2 root root 4096 May 31 11:38 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. 
This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},'23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: \'/content/asyncapi/spec/asyncapi.yml\'\n23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at \'asyncapi/docs\'\n23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of \'$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write\'\n\nDone! 
\u2728\nCheck out your shiny new generated files at /content/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n127.0.0.1 - - [31/May/2023 11:39:14] "GET / HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/global.min.css HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /js/asyncapi-ui.min.js HTTP/1.1" 200 -\n127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/asyncapi.min.css HTTP/1.1" 200 -\nInterupting serving of documentation and cleaning up...\n')),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Demo Kafka app",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," 
are\ndocumented as well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})),(0,o.kt)("h2",{id:"license"},"License"),(0,o.kt)("p",null,"FastKafka is licensed under the Apache License 2.0"),(0,o.kt)("p",null,"A permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code."),(0,o.kt)("p",null,"The full text of the license can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE"},"here"),"."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c3d488fa.7534f536.js b/assets/js/c3d488fa.7534f536.js new file mode 100644 index 0000000..857ea5a --- /dev/null +++ b/assets/js/c3d488fa.7534f536.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3196],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>m});var n=a(7294);function i(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){i(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function 
s(e,t){if(null==e)return{};var a,n,i=function(e,t){if(null==e)return{};var a,n,i={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(i[a]=e[a]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},d=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=l(a),u=i,m=c["".concat(p,".").concat(u)]||c[u]||k[u]||r;return a?n.createElement(m,o(o({ref:t},d),{},{components:a})):n.createElement(m,o({ref:t},d))}));function m(e,t){var a=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=a.length,o=new Array(r);o[0]=u;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[c]="string"==typeof e?e:i,o[1]=s;for(var l=2;l<r;l++)o[l]=a[l];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},4617:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var n=a(7462),i=(a(7294),a(3905));const r={},o="Using Redpanda to test FastKafka",s={unversionedId:"guides/Guide_31_Using_redpanda_to_test_fastkafka",id:"version-0.8.0/guides/Guide_31_Using_redpanda_to_test_fastkafka",title:"Using Redpanda to test FastKafka",description:"What is 
FastKafka?",source:"@site/versioned_docs/version-0.8.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",sourceDirName:"guides",slug:"/guides/Guide_31_Using_redpanda_to_test_fastkafka",permalink:"/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using multiple Kafka clusters",permalink:"/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters"},next:{title:"Deploy FastKafka docs to GitHub Pages",permalink:"/docs/guides/Guide_04_Github_Actions_Workflow"}},p={},l=[{value:"What is FastKafka?",id:"what-is-fastkafka",level:2},{value:"What is Redpanda?",id:"what-is-redpanda",level:2},{value:"Example repo",id:"example-repo",level:2},{value:"The process",id:"the-process",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:2},{value:"2. Cloning and setting up the example repo",id:"2-cloning-and-setting-up-the-example-repo",level:2},{value:"Create a virtual environment",id:"create-a-virtual-environment",level:3},{value:"Install Python dependencies",id:"install-python-dependencies",level:3},{value:"3. Writing server code",id:"3-writing-server-code",level:2},{value:"Preparing the demo model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"4. Writing the test code",id:"4-writing-the-test-code",level:2},{value:"5. 
Running the tests",id:"5-running-the-tests",level:2},{value:"Recap",id:"recap",level:3}],d={toc:l},c="wrapper";function k(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"using-redpanda-to-test-fastkafka"},"Using Redpanda to test FastKafka"),(0,i.kt)("h2",{id:"what-is-fastkafka"},"What is FastKafka?"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. Built on top of ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,i.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,i.kt)("h2",{id:"what-is-redpanda"},"What is Redpanda?"),(0,i.kt)("p",null,"Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools\nwork out of the box with Redpanda."),(0,i.kt)("p",null,"From ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"redpanda.com"),":"),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"Redpanda is a Kafka\xae-compatible streaming data platform that is proven\nto be 10x faster and 6x lower in total costs. 
It is also JVM-free,\nZooKeeper\xae-free, Jepsen-tested and source available.")),(0,i.kt)("p",null,"Some of the advantages of Redpanda over Kafka are"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A single binary with built-in everything, no ZooKeeper\xae or JVM\nneeded."),(0,i.kt)("li",{parentName:"ol"},"Costs upto 6X less than Kafka."),(0,i.kt)("li",{parentName:"ol"},"Up to 10x lower average latencies and up to 6x faster Kafka\ntransactions without compromising correctness.")),(0,i.kt)("p",null,"To learn more about Redpanda, please visit their\n",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/"},"website")," or checkout this ",(0,i.kt)("a",{parentName:"p",href:"https://redpanda.com/blog/redpanda-vs-kafka-performance-benchmark"},"blog\npost"),"\ncomparing Redpanda and Kafka\u2019s performance benchmarks."),(0,i.kt)("h2",{id:"example-repo"},"Example repo"),(0,i.kt)("p",null,"A sample FastKafka-based library that uses Redpanda for testing, based\non this guide, can be found\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"here"),"."),(0,i.kt)("h2",{id:"the-process"},"The process"),(0,i.kt)("p",null,"Here are the steps we\u2019ll be walking through to build our example:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Set up the prerequisites."),(0,i.kt)("li",{parentName:"ol"},"Clone the example repo."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write an application using FastKafka."),(0,i.kt)("li",{parentName:"ol"},"Explain how to write a test case to test FastKafka with Redpanda."),(0,i.kt)("li",{parentName:"ol"},"Run the test case and produce/consume messages.")),(0,i.kt)("h2",{id:"1-prerequisites"},"1. Prerequisites"),(0,i.kt)("p",null,"Before starting, make sure you have the following prerequisites set up:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Python 3.x"),": A Python 3.x installation is required to run\nFastKafka. 
You can download the latest version of Python from the\n",(0,i.kt)("a",{parentName:"li",href:"https://www.python.org/downloads/"},"official website"),". You\u2019ll also\nneed to have pip installed and updated, which is Python\u2019s package\ninstaller."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Docker Desktop"),": Docker is used to run Redpanda, which is\nrequired for testing FastKafka. You can download and install Docker\nDesktop from the ",(0,i.kt)("a",{parentName:"li",href:"https://www.docker.com/products/docker-desktop/"},"official\nwebsite"),"."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("strong",{parentName:"li"},"Git"),": You\u2019ll need to have Git installed to clone the example\nrepo. You can download Git from the ",(0,i.kt)("a",{parentName:"li",href:"https://git-scm.com/downloads"},"official\nwebsite"),".")),(0,i.kt)("h2",{id:"2-cloning-and-setting-up-the-example-repo"},"2. Cloning and setting up the example repo"),(0,i.kt)("p",null,"To get started with the example code, clone the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/airtai/sample_fastkafka_with_redpanda"},"GitHub\nrepository")," by\nrunning the following command in your terminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git\ncd sample_fastkafka_with_redpanda\n")),(0,i.kt)("p",null,"This will create a new directory called sample_fastkafka_with_redpanda\nand download all the necessary files."),(0,i.kt)("h3",{id:"create-a-virtual-environment"},"Create a virtual environment"),(0,i.kt)("p",null,"Before writing any code, let\u2019s ",(0,i.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/venv.html#module-venv"},"create a new virtual\nenvironment"),"\nfor our project."),(0,i.kt)("p",null,"A virtual environment is an isolated environment for a Python project,\nwhich allows you to manage project-specific dependencies and avoid\nconflicts between 
different projects."),(0,i.kt)("p",null,"To create a new virtual environment, run the following commands in your\nterminal:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"python3 -m venv venv\n")),(0,i.kt)("p",null,"This will create a new directory called ",(0,i.kt)("inlineCode",{parentName:"p"},"venv")," in your project\ndirectory, which will contain the virtual environment."),(0,i.kt)("p",null,"To activate the virtual environment, run the following command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"source venv/bin/activate\n")),(0,i.kt)("p",null,"This will change your shell\u2019s prompt to indicate that you are now\nworking inside the virtual environment."),(0,i.kt)("p",null,"Finally, run the following command to upgrade ",(0,i.kt)("inlineCode",{parentName:"p"},"pip"),", the Python package\ninstaller:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install --upgrade pip\n")),(0,i.kt)("h3",{id:"install-python-dependencies"},"Install Python dependencies"),(0,i.kt)("p",null,"Next, let\u2019s install the required Python dependencies. In this guide,\nwe\u2019ll be using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nto write our application code and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"pytest-asyncio")," to test\nit."),(0,i.kt)("p",null,"You can install the dependencies from the ",(0,i.kt)("inlineCode",{parentName:"p"},"requirements.txt")," file\nprovided in the cloned repository by running:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pip install -r requirements.txt\n")),(0,i.kt)("p",null,"This will install all the required packages and their dependencies."),(0,i.kt)("h2",{id:"3-writing-server-code"},"3. 
Writing server code"),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file in the cloned repository demonstrates how to\nuse FastKafka to consume messages from a Kafka topic, make predictions\nusing a predictive model, and publish the predictions to another Kafka\ntopic. Here is an explanation of the code:"),(0,i.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,i.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model."),(0,i.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,i.kt)("h3",{id:"messages"},"Messages"),(0,i.kt)("p",null,"FastKafka uses ",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. 
Pydantic\u2019s\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,i.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,i.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. It has four fields of type\n",(0,i.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat"},(0,i.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. 
It has a single field ",(0,i.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,i.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("h3",{id:"application"},"Application"),(0,i.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,i.kt)("p",null,"It starts by defining a dictionary called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,i.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,i.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used both\nto generate documentation and to later run the server against one of the\ngiven kafka broker."),(0,i.kt)("p",null,"Next, an instance of the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum required arguments:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generating documentation")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,i.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,i.kt)("p",null,"FastKafka provides convenient function decorators ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,i.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,i.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,i.kt)("p",null,"This following example shows how to use the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,i.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,i.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,i.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,i.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("h2",{id:"4-writing-the-test-code"},"4. Writing the test code"),(0,i.kt)("p",null,"The service can be tested using the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstance which can be configured to start a ",(0,i.kt)("a",{parentName:"p",href:"../../api/fastkafka/testing/LocalRedpandaBroker/"},"Redpanda\nbroker")," for testing\npurposes. 
The ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file in the cloned repository contains the\nfollowing code for testing."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import pytest\nfrom application import IrisInputData, IrisPrediction, kafka_app\n\nfrom fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n\n@pytest.mark.asyncio\nasync def test():\n # Start Tester app and create local Redpanda broker for testing\n async with Tester(kafka_app).using_local_redpanda(\n tag="v23.1.2", listener_port=9092\n ) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nmodule utilizes uses\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/testing/LocalRedpandaBroker#fastkafka.testing.LocalRedpandaBroker"},(0,i.kt)("inlineCode",{parentName:"a"},"LocalRedpandaBroker")),"\nto start and stop a Redpanda broker for testing purposes using Docker"),(0,i.kt)("h2",{id:"5-running-the-tests"},"5. Running the tests"),(0,i.kt)("p",null,"We can run the tests which is in ",(0,i.kt)("inlineCode",{parentName:"p"},"test.py")," file by executing the\nfollowing command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"pytest test.py\n")),(0,i.kt)("p",null,"This will start a Redpanda broker using Docker and executes tests. 
The\noutput of the command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest\n============================== test session starts ===============================\nplatform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0\nrootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py\nplugins: asyncio-0.21.0, anyio-3.6.2\nasyncio: mode=strict\ncollected 1 item \n\ntest.py . [100%]\n\n=============================== 1 passed in 7.28s ================================\n(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$\n")),(0,i.kt)("p",null,"Running the tests with the Redpanda broker ensures that your code is\nworking correctly with a real Kafka-like message broker, making your\ntests more reliable."),(0,i.kt)("h3",{id:"recap"},"Recap"),(0,i.kt)("p",null,"We have created an Iris classification model and encapulated it into our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napplication. 
The app will consume the ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," from the\n",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,i.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,i.kt)("p",null,"To test the app we have:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Created the app")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Started our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/testing/Tester#fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"a"},"Tester")),"\nclass with ",(0,i.kt)("inlineCode",{parentName:"p"},"Redpanda")," broker which mirrors the developed app topics\nfor testing purposes")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Sent ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message to ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message"))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c4a14462.17d186df.js b/assets/js/c4a14462.17d186df.js new file mode 100644 index 0000000..58d645c --- /dev/null +++ b/assets/js/c4a14462.17d186df.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1159],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>u});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,i=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=s(a),k=r,u=d["".concat(p,".").concat(k)]||d[k]||f[k]||i;return a?n.createElement(u,o(o({ref:t},c),{},{components:a})):n.createElement(u,o({ref:t},c))}));function u(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var i=a.length,o=new Array(i);o[0]=k;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[d]="string"==typeof e?e:r,o[1]=l;for(var s=2;s<i;s++)o[s]=a[s];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},4159:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>f,frontMatter:()=>i,metadata:()=>l,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const i={},o=void 0,l={unversionedId:"api/fastkafka/EventMetadata",id:"version-0.7.0/api/fastkafka/EventMetadata",title:"EventMetadata",description:"fastkafka.EventMetadata 
{fastkafka.EventMetadata}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/EventMetadata.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/EventMetadata",permalink:"/docs/0.7.0/api/fastkafka/EventMetadata",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Benchmarking FastKafka app",permalink:"/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka"},next:{title:"FastKafka",permalink:"/docs/0.7.0/api/fastkafka/"}},p={},s=[{value:"<code>fastkafka.EventMetadata</code>",id:"fastkafka.EventMetadata",level:2},{value:"<code>create_event_metadata</code>",id:"create_event_metadata",level:3}],c={toc:s},d="wrapper";function f(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.EventMetadata"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.EventMetadata")),(0,r.kt)("p",null,"A class for encapsulating Kafka record metadata."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"topic"),": The topic this record is received from"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"partition"),": The partition from which this record is received"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"offset"),": The position of this record in the corresponding Kafka partition"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"timestamp"),": The timestamp of this record"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"timestamp_type"),": The timestamp type of this record"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"key"),": The key (or ",(0,r.kt)("inlineCode",{parentName:"li"},"None")," if no key is specified)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"value"),": The 
value"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"serialized_key_size"),": The size of the serialized, uncompressed key in bytes"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"serialized_value_size"),": The size of the serialized, uncompressed value in bytes"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"headers"),": The headers")),(0,r.kt)("h3",{id:"create_event_metadata"},(0,r.kt)("inlineCode",{parentName:"h3"},"create_event_metadata")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def create_event_metadata(record: aiokafka.structs.ConsumerRecord) -> EventMetadata")),(0,r.kt)("p",null,"Creates an instance of EventMetadata from a ConsumerRecord."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"record"),": The Kafka ConsumerRecord.")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"The created EventMetadata instance.")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c4f5d8e4.b78e8a3e.js b/assets/js/c4f5d8e4.b78e8a3e.js new file mode 100644 index 0000000..255dcdb --- /dev/null +++ b/assets/js/c4f5d8e4.b78e8a3e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4195],{9767:(e,t,a)=>{a.r(t),a.d(t,{default:()=>S});var r=a(7294),n=a(6010),o=a(9960),i=a(2263),s=a(7452),l=a(7462);const c={features:"features_t9lD",featureSvg:"featureSvg_GfXr",title:"title_iMVk",textContainer:"textContainer_jPR0",subTitle:"subTitle_yWbm",description:"description_ynef",rowWitExtraMargin:"rowWitExtraMargin_qK_b"},m=[{title:"WRITE",src:"img/write.svg",description:r.createElement(r.Fragment,null,"producers & consumers for Kafka topics in a simplified 
way")},{title:"PROTOTYPE",src:"img/prototype.svg",description:r.createElement(r.Fragment,null,"quickly & develop high-performance Kafka-based services")},{title:"STREAMLINE",src:"img/streamline.svg",description:r.createElement(r.Fragment,null,"your workflow & accelerate your progress")}];function u(e){let{src:t,title:a,description:o}=e;return r.createElement("div",{className:(0,n.Z)("col col--4")},r.createElement("div",{className:"text--center"},r.createElement("img",{className:c.featureSvg,src:t})),r.createElement("div",{className:((0,n.Z)("text--center padding-horiz--md"),c.textContainer)},r.createElement("h3",null,a),r.createElement("p",null,o)))}function d(){return r.createElement("section",{className:c.features},r.createElement("div",{className:"container"},r.createElement("div",{className:(0,n.Z)("col col--12")},r.createElement("h2",{className:c.title},"Swim with the stream\u2026ing services")),r.createElement("div",{className:"row"},m.map(((e,t)=>r.createElement(u,(0,l.Z)({key:t},e)))))))}const p={features:"features_JWCp",featureSvg:"featureSvg_ed9f",title:"title_e8_4",subTitle:"subTitle_TSnm",rowWitExtraMargin:"rowWitExtraMargin_OCkm",link:"link_ksra",wrapper:"wrapper_yrC9",verticalAndHorizontalCenter:"verticalAndHorizontalCenter_LWfS",childrenWithExtraPadding:"childrenWithExtraPadding_xwhI"};function g(){return r.createElement("section",{className:p.features},r.createElement("div",{className:"container"},r.createElement("div",{className:(0,n.Z)("col col--12")},r.createElement("h2",{className:p.title},"You get what you expect")),r.createElement("div",{className:`row ${p.childrenWithExtraPadding}`},r.createElement("div",{className:(0,n.Z)("col col--6 text--center padding-horiz--md")},r.createElement("p",null,"Function decorators with type hints specifying Pydantic classes for JSON encoding/decoding, automatic message routing and documentation generation.")),r.createElement("div",{className:(0,n.Z)("col col--6 text--center 
padding-horiz--md")},r.createElement("p",null,"Built on top of ",r.createElement("a",{className:p.link,href:"https://docs.pydantic.dev/",target:"_blank"},"Pydantic"),", ",r.createElement("a",{className:p.link,href:"https://github.com/aio-libs/aiokafka/",target:"_blank"},"AIOKafka")," and ",r.createElement("a",{className:p.link,href:"https://www.asyncapi.com/",target:"_blank"},"AsyncAPI"),", FastKafka simplifies the process of writing producers and consumers for Kafka topics, handling all the parsing, networking, task scheduling and data generation automatically. ")))))}const f={features:"features_i8jF",featureSvg:"featureSvg_IBxz",title:"title__ymQ",subTitle:"subTitle_DG7d",description:"description_fCta",buttons:"buttons_E9Qp",heroButton:"heroButton_F0GI",testimonialAnchor:"testimonialAnchor_iYyG",testimonialWrapper:"testimonialWrapper_gvoa",testimonialDescription:"testimonialDescription_MWAM",testimonialHeader:"testimonialHeader_iSI8",testimonialUserInfo:"testimonialUserInfo_th5k",testimonialProfilePic:"testimonialProfilePic_wg0d",testimonialSourceIcon:"testimonialSourceIcon_RwqW"};function h(e){let{testimonialLimitToShow:t,allTestimonials:a}=e;return r.createElement("div",{className:`${(0,n.Z)("col col--4")} ${f.testimonialWrapper}`},Object.entries(a).map((e=>{let[a,n]=e;return a.split("_")[1]<=t?r.createElement("a",{key:a,href:n.source.link,target:"_blank",rel:"noopener noreferrer",className:f.testimonialAnchor},r.createElement("div",{className:f.testimonialContainer},r.createElement("div",{className:f.testimonialHeader},r.createElement("div",{className:f.testimonialUserInfo},r.createElement("img",{src:n.user.profilePic,className:f.testimonialProfilePic}),r.createElement("div",null,r.createElement("h6",null,n.user.fullName),r.createElement("p",null,n.user.userName))),r.createElement("div",null,r.createElement("img",{className:f.testimonialSourceIcon,src:n.source.icon,alt:""}))),r.createElement("div",{className:"text--center 
padding-horiz--md"},r.createElement("p",{className:f.testimonialDescription},n.description)))):null})))}const w=["deadwisdom","benbenbang","Berouald","baggiponte","No-Application5593","code_mc","teajunky","SteamingBeer","BestBottle4517"];function b(){const[e,t]=(0,r.useState)("2"),[a,o]=(0,r.useState)(w.reduce(((e,t)=>({...e,[t]:{icon_img:"https://www.redditstatic.com/avatars/defaults/v2/avatar_default_1.png",subreddit:{display_name_prefixed:`u/${t}`}}})),{})),i=[{container_1:{source:{icon:"img/reddit-logo.png",link:"https://www.reddit.com/r/Python/comments/13i0eaz/comment/jk90bwz/?utm_source=share&utm_medium=web2x&context=3"},user:{profilePic:a.deadwisdom.icon_img,userName:a.deadwisdom.subreddit.display_name_prefixed,fullName:"deadwisdom"},description:r.createElement(r.Fragment,null,"Well well well, if it isn't the library I was already making but better. Very nice. What is your long-term vision for supporting this as a company? And are you using this now to support real customers or are you expecting this might help you establish a niche?")},container_2:{source:{icon:"img/twitter-logo.svg",link:"https://twitter.com/emaxerrno/status/1635005087721611264?s=20"},user:{profilePic:"img/a-alphabet-round-icon.png",userName:"Alexander Gallego",fullName:"Alexander Gallego"},description:r.createElement(r.Fragment,null,"this is cool. let me know if you want to share it w/ the @redpandadata community.")},container_3:{source:{icon:"img/reddit-logo.png",link:"https://www.reddit.com/r/Python/comments/11paz9u/comment/jbxbbxp/?utm_source=share&utm_medium=web2x&context=3"},user:{profilePic:a.BestBottle4517.icon_img.replace(/&/g,"&"),userName:a.BestBottle4517.subreddit.display_name_prefixed,fullName:"BestBottle4517"},description:r.createElement(r.Fragment,null,"Very cool indeed. Currently at work we're using RabbitMQ for messaging so this doesn't apply to us (for now), but this type and style of implementation is exactly what I would expect when searching for libs like this. 
Great job!")},container_4:{source:{icon:"img/reddit-logo.png",link:"https://www.reddit.com/r/programming/comments/11sjtgm/comment/jceqgml/?utm_source=share&utm_medium=web2x&context=3"},user:{profilePic:a.teajunky.icon_img,userName:a.teajunky.subreddit.display_name_prefixed,fullName:"teajunky"},description:r.createElement(r.Fragment,null,"Wow, the code in the package is auto-generated from Jupyter-Notebooks")}},{container_1:{source:{icon:"img/reddit-logo.png",link:"https://www.reddit.com/r/FastAPI/comments/124v5di/comment/jfhg2t2/?utm_source=share&utm_medium=web2x&context=3"},user:{profilePic:a.benbenbang.icon_img.replace(/&/g,"&"),userName:a.benbenbang.subreddit.display_name_prefixed,fullName:"benbenbang"},description:r.createElement(r.Fragment,null,"Nice \ud83d\udc4d\ud83c\udffb I\u2019ve promoted this project in the team! Also, would like to contribute if there\u2019s some kind of roadmap")},container_2:{source:{icon:"img/reddit-logo.png",link:"https://www.reddit.com/r/Python/comments/11paz9u/comment/jbxf1v8/?utm_source=share&utm_medium=web2x&context=3"},user:{profilePic:a.code_mc.icon_img,userName:a.code_mc.subreddit.display_name_prefixed,fullName:"code_mc"},description:r.createElement(r.Fragment,null,"I really like the idea of this, as the biggest gripe I have with most pub/sub solutions is all of the tedious boiler plate code needed to correctly subscribe and publish and manage message leases etc. While you often just want to grab a message, do some processing and put it on a different queue.")},container_3:{source:{icon:"img/reddit-logo.png",link:"https://www.reddit.com/r/FastAPI/comments/11oq09r/comment/jc4dwit/?utm_source=share&utm_medium=web2x&context=3"},user:{profilePic:a["No-Application5593"].icon_img,userName:a["No-Application5593"].subreddit.display_name_prefixed,fullName:"No-Application5593"},description:r.createElement(r.Fragment,null,"Wow! This is really great, thank you for your efforts guys. 
This is what I really need for one of my future projects.")},container_4:{source:{icon:"img/reddit-logo.png",link:"https://www.reddit.com/r/FastAPI/comments/11oq09r/comment/jbx4dfn/?utm_source=share&utm_medium=web2x&context=3"},user:{profilePic:a.SteamingBeer.icon_img.replace(/&/g,"&"),userName:a.SteamingBeer.subreddit.display_name_prefixed,fullName:"SteamingBeer"},description:r.createElement(r.Fragment,null,"Thank you for your efforts. I see me pitching this library to my team in the near future!")}},{container_1:{source:{icon:"img/reddit-logo.png",link:"https://www.reddit.com/r/FastAPI/comments/124v5di/comment/jee9vm9/?utm_source=share&utm_medium=web2x&context=3"},user:{profilePic:a.Berouald.icon_img,userName:a.Berouald.subreddit.display_name_prefixed,fullName:"Berouald"},description:r.createElement(r.Fragment,null,"This is great! I've been thinking about making a similar tool for quite some time, nice job sir! I guess it's to fit your use case, by why stop at Kafka? A paradigm like this would be awesome in the form of a microframework. 
Like a general message consumer framework with pluggable interfaces for Kafka, Rabbitmq, ActiveMQ or even the Redis message broker.")},container_2:{source:{icon:"img/reddit-logo.png",link:"https://www.reddit.com/r/Python/comments/120mt5k/comment/jdpwycr/?utm_source=share&utm_medium=web2x&context=3"},user:{profilePic:a.baggiponte.icon_img,userName:a.baggiponte.subreddit.display_name_prefixed,fullName:"baggiponte"},description:r.createElement(r.Fragment,null,"Really hope this project becomes as popular as the OG FastAPI!")},container_3:{source:{icon:"img/twitter-logo.svg",link:"https://twitter.com/perbu/status/1635014207656849408?s=20"},user:{profilePic:"img/p-alphabet-round-icon.png",userName:"Per Buer",fullName:"Per Buer"},description:r.createElement(r.Fragment,null,"I really like how we're getting these more specialized ways to leverage streaming databases, instead of the somewhat intimidating access libraries.")},container_4:{source:{icon:"img/Y_Combinator_Logo.png",link:"https://news.ycombinator.com/item?id=35086594"},user:{profilePic:"img/I.svg",userName:"iknownothow",fullName:"iknownothow"},description:r.createElement(r.Fragment,null,"It looks incredible and I truly hope your project takes off for my sake since I have to work with Kafka from time to time!")}}];return(0,r.useEffect)((()=>{!async function(){try{let e={};for(const t of w){const a=await fetch(`https://www.reddit.com/user/${t}/about.json`);let r=await a.json();r.data.icon_img=r.data.icon_img.split("?")[0],e[t]=r.data}o(e)}catch(e){console.error(e)}}()}),[]),r.createElement("section",{className:`${f.features} hero hero--primary`},r.createElement("div",{className:"container"},r.createElement("div",{className:(0,n.Z)("col col--12")},r.createElement("h2",{className:f.title},"The community has 
spoken!")),r.createElement("div",{className:"row"},i.map(((t,a)=>r.createElement(h,{key:a,testimonialLimitToShow:e,allTestimonials:t})))),e<Object.keys(i[0]).length&&r.createElement("div",{className:f.buttons},r.createElement("button",{className:(0,n.Z)("button button--lg",f.heroButton),onClick:()=>{t("2"===e?"3":Object.keys(i[0]).length)}},"Load More"))))}var _=a(1876);const k={features:"features_fQn7",featureSvg:"featureSvg_Td5A",title:"title_bFDR",subTitle:"subTitle_u53r",description:"description_gnTt",rowWitExtraMargin:"rowWitExtraMargin_R_NL",link:"link_SBpC",wrapper:"wrapper_DIPT",verticalAndHorizontalCenter:"verticalAndHorizontalCenter_krzz",href:"href_wqkW",faqAnswer:"faqAnswer_fJMF"},E=[{heading:"How much does FastKafka cost?",content:"FastKafka is under Apache 2.0 license and free to use."},{heading:"How can I contribute or request features?",content:"We love and welcome community contributions! Here is a <a href='https://github.com/airtai/fastkafka/blob/main/CONTRIBUTING.md' target='_blank'>doc</a> to get you started. To request features, add a \u201cFeature request\u201d using the New issue button in GitHub from <a href='https://github.com/airtai/fastkafka/issues' target='_blank'>this link</a>, or join our feature-request <a href='https://discord.gg/CJWmYpyFbc' target='_blank'>Discord channel</a>."},{heading:"Do you support any streaming platforms other than Kafka?",content:"Slowly, but surely. We built the initial version for Kafka service and for our needs, but we reached out to the wider community to find out what to do next. We added support for Redpanda, and also got requests for RabbitMQ and Pulsar that went to our backlog and we\u2019ll support them in our future releases."},{heading:"Does FastKafka integrate with AsyncAPI in the way that FastAPi integrates with OpenAPI?",content:"Very much the same, but with a small difference due to dependencies of AsyncAPI. 
You write your code using decorators and you get AsyncAPI specification generated automatically as YAML file. You can convert that file to static HTML file ether by Python API call, CLI or github action. AsyncAPI requires Node.js, and you don\u2019t necessarily want this in production."},{heading:"Does it assume that Kafka messages are in JSON format? What if we want to use protobuf, for example?",content:"For the first implementation we just released uses with JSON encoded messages, but we can easily add additional formats/protocols. We\u2019ve created an issue on GitHub and will try to prioritize it for one of the next releases."}];function y(){return r.createElement("section",{className:k.features},r.createElement("div",{className:"container"},r.createElement("div",{className:(0,n.Z)("col col--12")},r.createElement("h2",{className:k.title},"FAQs"),r.createElement("p",null,"For anything not covered here, join ",r.createElement("a",{className:k.href,href:"https://discord.gg/CJWmYpyFbc",target:"_blank"},"our Discord"))),r.createElement("div",{className:(0,n.Z)("col col--12 text--left padding-horiz--md")},r.createElement(_.UQ,{allowZeroExpanded:!0},E.map(((e,t)=>r.createElement(_.Qd,{key:t},r.createElement(_.Ol,null,r.createElement(_.on,null,e.heading)),r.createElement(_.Mt,null,r.createElement("p",{className:k.faqAnswer,dangerouslySetInnerHTML:{__html:e.content}})))))))))}var N=a(2251);const v={browserWindow:"browserWindow_my1Q",browserWindowHeader:"browserWindowHeader_jXSR",row:"row_KZDM",buttons:"buttons_uHc7",right:"right_oyze",browserWindowAddressBar:"browserWindowAddressBar_Pd8y",dot:"dot_giz1",browserWindowMenuIcon:"browserWindowMenuIcon_Vhuh",bar:"bar_rrRL",browserWindowBody:"browserWindowBody_Idgs"};function x(e){let{children:t,minHeight:a,url:o="",style:i,bodyStyle:s}=e;return 
r.createElement("div",{className:v.browserWindow,style:{...i,minHeight:a}},r.createElement("div",{className:v.browserWindowHeader},r.createElement("div",{className:v.buttons},r.createElement("span",{className:v.dot,style:{background:"#f25f58"}}),r.createElement("span",{className:v.dot,style:{background:"#fbbe3c"}}),r.createElement("span",{className:v.dot,style:{background:"#58cb42"}})),r.createElement("div",{className:(0,n.Z)(v.browserWindowAddressBar,"text--truncate")},o),r.createElement("div",{className:v.browserWindowMenuIcon},r.createElement("div",null,r.createElement("span",{className:v.bar}),r.createElement("span",{className:v.bar}),r.createElement("span",{className:v.bar})))),r.createElement("div",{className:v.browserWindowBody,style:s},t))}const I={features:"features_K0bx",featureSvg:"featureSvg_waEg",title:"title_lvu5",fastkafkaDescription:"fastkafkaDescription_h_GB",subTitle:"subTitle_FXDe",description:"description_qDmr",rowWitExtraMargin:"rowWitExtraMargin_xiCQ",fastkafkaChatIframe:"fastkafkaChatIframe_w3XB",fastkafkaChatHeader:"fastkafkaChatHeader_lrZG"};function P(){return r.createElement("section",{className:`${I.features} hero hero--primary`},r.createElement("div",{className:"container"},r.createElement("div",{className:(0,n.Z)("col col--12")},r.createElement("h2",{className:I.title},"Check out our code-generation feature!"),r.createElement("p",{className:I.fastkafkaDescription},"Let us know what you need solved and we\u2019ll generate the FastKafka code for you!")),r.createElement("div",{className:"row"},r.createElement("div",{className:(0,n.Z)("col col--12")},r.createElement("div",{className:"text--center padding-horiz--md"},r.createElement(x,null,r.createElement(N.Z,{url:"https://fastkafka-chat.azurewebsites.net/",className:I.fastkafkaChatIframe})))))))}const A={robotFooterContainer:"robotFooterContainer_CsQd",robotFooterIcon:"robotFooterIcon_R67M"};function W(){return 
r.createElement("section",null,r.createElement("div",{className:(0,n.Z)("container",A.robotFooterContainer)},r.createElement("img",{className:A.robotFooterIcon,src:"img/robot-footer.svg"})))}const F={heroBanner:"heroBanner_qdFl",heroRobot:"heroRobot_FLpk",buttons:"buttons_AeoN",title:"title_GqtP",description:"description_meEo",heroButton:"heroButton_GTT_",descriptionMobile:"descriptionMobile_CZcP"};function B(){return r.createElement("header",{className:(0,n.Z)("hero hero--primary",F.heroBanner)},r.createElement("div",{className:"container"},r.createElement("img",{className:F.heroRobot,src:"img/robot-hero.svg"}),r.createElement("p",{className:F.description},"Open-source framework for building asynchronous web "),r.createElement("p",{className:F.description},"services that interact with Kafka"),r.createElement("p",{className:F.descriptionMobile},"Open-source framework for building asynchronous web services that interact with Kafka"),r.createElement("div",{className:F.buttons},r.createElement(o.Z,{className:(0,n.Z)("button button--lg",F.heroButton),to:"/docs"},"Get Started"))))}function S(){const{siteConfig:e}=(0,i.Z)();return r.createElement(s.Z,{title:e.tagline,description:e.customFields.description},r.createElement(B,null),r.createElement("main",null,r.createElement(d,null),r.createElement(P,null),r.createElement(g,null),r.createElement(b,null),r.createElement(y,null),r.createElement(W,null)))}}}]); \ No newline at end of file diff --git a/assets/js/c602cd44.c0889991.js b/assets/js/c602cd44.c0889991.js new file mode 100644 index 0000000..0a98bfc --- /dev/null +++ b/assets/js/c602cd44.c0889991.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6862],{3905:(e,n,t)=>{t.d(n,{Zo:()=>l,kt:()=>k});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function c(e){for(var n=1;n<arguments.length;n++){var t=null!=arguments[n]?arguments[n]:{};n%2?a(Object(t),!0).forEach((function(n){o(e,n,t[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):a(Object(t)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(t,n))}))}return e}function i(e,n){if(null==e)return{};var t,r,o=function(e,n){if(null==e)return{};var t,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)t=a[r],n.indexOf(t)>=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)t=a[r],n.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var s=r.createContext({}),d=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):c(c({},n),e)),t},l=function(e){var n=d(e.components);return r.createElement(s.Provider,{value:n},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},u=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,l=i(e,["components","mdxType","originalType","parentName"]),p=d(t),u=o,k=p["".concat(s,".").concat(u)]||p[u]||f[u]||a;return t?r.createElement(k,c(c({ref:n},l),{},{components:t})):r.createElement(k,c({ref:n},l))}));function k(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var a=t.length,c=new Array(a);c[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:o,c[1]=i;for(var d=2;d<a;d++)c[d]=t[d];return r.createElement.apply(null,c)}return r.createElement.apply(null,t)}u.displayName="MDXCreateElement"},9216:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>c,default:()=>f,frontMatter:()=>a,metadata:()=>i,toc:()=>d});var 
r=t(7462),o=(t(7294),t(3905));const a={},c=void 0,i={unversionedId:"api/fastkafka/encoder/json_encoder",id:"version-0.6.0/api/fastkafka/encoder/json_encoder",title:"json_encoder",description:"fastkafka.encoder.jsonencoder {fastkafka.encoder.jsonencoder}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/json_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_encoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/json_encoder",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_decoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/json_decoder"},next:{title:"ApacheKafkaBroker",permalink:"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker"}},s={},d=[{value:"<code>fastkafka.encoder.json_encoder</code>",id:"fastkafka.encoder.json_encoder",level:2},{value:"<code>json_encoder</code>",id:"json_encoder",level:3}],l={toc:d},p="wrapper";function f(e){let{components:n,...t}=e;return(0,o.kt)(p,(0,r.Z)({},l,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"fastkafka.encoder.json_encoder"},(0,o.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.json_encoder")),(0,o.kt)("h3",{id:"json_encoder"},(0,o.kt)("inlineCode",{parentName:"h3"},"json_encoder")),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"def json_encoder(msg: pydantic.main.BaseModel) -> bytes")),(0,o.kt)("p",null,"Encoder to encode pydantic instances to json string"),(0,o.kt)("p",null,(0,o.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"msg"),": An instance of pydantic basemodel")),(0,o.kt)("p",null,(0,o.kt)("strong",{parentName:"p"},"Returns"),":"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Json string in bytes which is encoded from pydantic basemodel")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c95b781b.54e317b5.js b/assets/js/c95b781b.54e317b5.js new file mode 100644 index 
0000000..90f73cd --- /dev/null +++ b/assets/js/c95b781b.54e317b5.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9810],{6280:a=>{a.exports=JSON.parse('{"pluginId":"default","version":"0.8.0","label":"0.8.0","banner":null,"badge":true,"noIndex":false,"className":"docs-version-0.8.0","isLast":true,"docsSidebars":{"tutorialSidebar":[{"type":"link","label":"FastKafka","href":"/docs/","docId":"index"},{"type":"category","label":"Guides","items":[{"type":"category","label":"Writing services","items":[{"type":"link","label":"@consumes basics","href":"/docs/guides/Guide_11_Consumes_Basics","docId":"guides/Guide_11_Consumes_Basics"},{"type":"link","label":"Batch consuming","href":"/docs/guides/Guide_12_Batch_Consuming","docId":"guides/Guide_12_Batch_Consuming"},{"type":"link","label":"@produces basics","href":"/docs/guides/Guide_21_Produces_Basics","docId":"guides/Guide_21_Produces_Basics"},{"type":"link","label":"Defining a partition key","href":"/docs/guides/Guide_22_Partition_Keys","docId":"guides/Guide_22_Partition_Keys"},{"type":"link","label":"Batch producing","href":"/docs/guides/Guide_23_Batch_Producing","docId":"guides/Guide_23_Batch_Producing"},{"type":"link","label":"Lifespan Events","href":"/docs/guides/Guide_05_Lifespan_Handler","docId":"guides/Guide_05_Lifespan_Handler"},{"type":"link","label":"Encoding and Decoding Kafka Messages with FastKafka","href":"/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","docId":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},{"type":"link","label":"Using multiple Kafka clusters","href":"/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters","docId":"guides/Guide_24_Using_Multiple_Kafka_Clusters"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Testing","items":[{"type":"link","label":"Using Redpanda to test 
FastKafka","href":"/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka","docId":"guides/Guide_31_Using_redpanda_to_test_fastkafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Documentation generation","items":[{"type":"link","label":"Deploy FastKafka docs to GitHub Pages","href":"/docs/guides/Guide_04_Github_Actions_Workflow","docId":"guides/Guide_04_Github_Actions_Workflow"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Deployment","items":[{"type":"link","label":"Deploying FastKafka using Docker","href":"/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka","docId":"guides/Guide_30_Using_docker_to_deploy_fastkafka"},{"type":"link","label":"Using FastAPI to Run FastKafka Application","href":"/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application","docId":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Benchmarking","items":[{"type":"link","label":"Benchmarking FastKafka 
app","href":"/docs/guides/Guide_06_Benchmarking_FastKafka","docId":"guides/Guide_06_Benchmarking_FastKafka"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"API","items":[{"type":"link","label":"EventMetadata","href":"/docs/api/fastkafka/EventMetadata","docId":"api/fastkafka/EventMetadata"},{"type":"link","label":"FastKafka","href":"/docs/api/fastkafka/","docId":"api/fastkafka/FastKafka"},{"type":"link","label":"KafkaEvent","href":"/docs/api/fastkafka/KafkaEvent","docId":"api/fastkafka/KafkaEvent"},{"type":"category","label":"encoder","items":[{"type":"link","label":"AvroBase","href":"/docs/api/fastkafka/encoder/AvroBase","docId":"api/fastkafka/encoder/AvroBase"},{"type":"link","label":"avro_decoder","href":"/docs/api/fastkafka/encoder/avro_decoder","docId":"api/fastkafka/encoder/avro_decoder"},{"type":"link","label":"avro_encoder","href":"/docs/api/fastkafka/encoder/avro_encoder","docId":"api/fastkafka/encoder/avro_encoder"},{"type":"link","label":"avsc_to_pydantic","href":"/docs/api/fastkafka/encoder/avsc_to_pydantic","docId":"api/fastkafka/encoder/avsc_to_pydantic"},{"type":"link","label":"json_decoder","href":"/docs/api/fastkafka/encoder/json_decoder","docId":"api/fastkafka/encoder/json_decoder"},{"type":"link","label":"json_encoder","href":"/docs/api/fastkafka/encoder/json_encoder","docId":"api/fastkafka/encoder/json_encoder"}],"collapsed":true,"collapsible":true},{"type":"category","label":"executors","items":[{"type":"link","label":"DynamicTaskExecutor","href":"/docs/api/fastkafka/executors/DynamicTaskExecutor","docId":"api/fastkafka/executors/DynamicTaskExecutor"},{"type":"link","label":"SequentialExecutor","href":"/docs/api/fastkafka/executors/SequentialExecutor","docId":"api/fastkafka/executors/SequentialExecutor"}],"collapsed":true,"collapsible":true},{"type":"category","label":"testing","items":[{"type":"link","label":"ApacheKafkaBroker","href":"/docs/api/fastkafka/testing/ApacheKafkaBroker","docId"
:"api/fastkafka/testing/ApacheKafkaBroker"},{"type":"link","label":"LocalRedpandaBroker","href":"/docs/api/fastkafka/testing/LocalRedpandaBroker","docId":"api/fastkafka/testing/LocalRedpandaBroker"},{"type":"link","label":"Tester","href":"/docs/api/fastkafka/testing/Tester","docId":"api/fastkafka/testing/Tester"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"CLI","items":[{"type":"link","label":"fastkafka","href":"/docs/cli/fastkafka","docId":"cli/fastkafka"},{"type":"link","label":"run_fastkafka_server_process","href":"/docs/cli/run_fastkafka_server_process","docId":"cli/run_fastkafka_server_process"}],"collapsed":true,"collapsible":true},{"type":"link","label":"LICENSE","href":"/docs/LICENSE","docId":"LICENSE"},{"type":"link","label":"Contributing to FastKafka","href":"/docs/CONTRIBUTING","docId":"CONTRIBUTING"},{"type":"link","label":"Release notes","href":"/docs/CHANGELOG","docId":"CHANGELOG"}]},"docs":{"api/fastkafka/encoder/avro_decoder":{"id":"api/fastkafka/encoder/avro_decoder","title":"avro_decoder","description":"avrodecoder {fastkafka.encoder.avrodecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avro_encoder":{"id":"api/fastkafka/encoder/avro_encoder","title":"avro_encoder","description":"avroencoder {fastkafka.encoder.avroencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/AvroBase":{"id":"api/fastkafka/encoder/AvroBase","title":"AvroBase","description":"fastkafka.encoder.AvroBase {fastkafka.encoder.AvroBase}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avsc_to_pydantic":{"id":"api/fastkafka/encoder/avsc_to_pydantic","title":"avsc_to_pydantic","description":"avsctopydantic {fastkafka.encoder.avsctopydantic}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_decoder":{"id":"api/fastkafka/encoder/json_decoder","title":"json_decoder","description":"jsondecoder 
{fastkafka.encoder.jsondecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_encoder":{"id":"api/fastkafka/encoder/json_encoder","title":"json_encoder","description":"jsonencoder {fastkafka.encoder.jsonencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/EventMetadata":{"id":"api/fastkafka/EventMetadata","title":"EventMetadata","description":"fastkafka.EventMetadata {fastkafka.EventMetadata}","sidebar":"tutorialSidebar"},"api/fastkafka/executors/DynamicTaskExecutor":{"id":"api/fastkafka/executors/DynamicTaskExecutor","title":"DynamicTaskExecutor","description":"fastkafka.executors.DynamicTaskExecutor {fastkafka.executors.DynamicTaskExecutor}","sidebar":"tutorialSidebar"},"api/fastkafka/executors/SequentialExecutor":{"id":"api/fastkafka/executors/SequentialExecutor","title":"SequentialExecutor","description":"fastkafka.executors.SequentialExecutor {fastkafka.executors.SequentialExecutor}","sidebar":"tutorialSidebar"},"api/fastkafka/FastKafka":{"id":"api/fastkafka/FastKafka","title":"FastKafka","description":"init {fastkafka._application.app.FastKafka.init}","sidebar":"tutorialSidebar"},"api/fastkafka/KafkaEvent":{"id":"api/fastkafka/KafkaEvent","title":"KafkaEvent","description":"fastkafka.KafkaEvent {fastkafka.KafkaEvent}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/ApacheKafkaBroker":{"id":"api/fastkafka/testing/ApacheKafkaBroker","title":"ApacheKafkaBroker","description":"fastkafka.testing.ApacheKafkaBroker {fastkafka.testing.ApacheKafkaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/LocalRedpandaBroker":{"id":"api/fastkafka/testing/LocalRedpandaBroker","title":"LocalRedpandaBroker","description":"fastkafka.testing.LocalRedpandaBroker {fastkafka.testing.LocalRedpandaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/Tester":{"id":"api/fastkafka/testing/Tester","title":"Tester","description":"init {fastkafka._application.tester.Tester.init}","sidebar":"tutorialSidebar"},"CHANGELOG":{"id":"CHANGELOG","title":"Release 
notes","description":"0.7.0","sidebar":"tutorialSidebar"},"cli/fastkafka":{"id":"cli/fastkafka","title":"fastkafka","description":"Usage:","sidebar":"tutorialSidebar"},"cli/run_fastkafka_server_process":{"id":"cli/run_fastkafka_server_process","title":"run_fastkafka_server_process","description":"Usage:","sidebar":"tutorialSidebar"},"CONTRIBUTING":{"id":"CONTRIBUTING","title":"Contributing to FastKafka","description":"First off, thanks for taking the time to contribute! \u2764\ufe0f","sidebar":"tutorialSidebar"},"guides/Guide_00_FastKafka_Demo":{"id":"guides/Guide_00_FastKafka_Demo","title":"FastKafka tutorial","description":"FastKafka is a powerful and easy-to-use"},"guides/Guide_01_Intro":{"id":"guides/Guide_01_Intro","title":"Intro","description":"This tutorial will show you how to use FastKafkaAPI, step by"},"guides/Guide_02_First_Steps":{"id":"guides/Guide_02_First_Steps","title":"First Steps","description":"Creating a simple Kafka consumer app"},"guides/Guide_03_Authentication":{"id":"guides/Guide_03_Authentication","title":"Authentication","description":"TLS Authentication"},"guides/Guide_04_Github_Actions_Workflow":{"id":"guides/Guide_04_Github_Actions_Workflow","title":"Deploy FastKafka docs to GitHub Pages","description":"Getting started","sidebar":"tutorialSidebar"},"guides/Guide_05_Lifespan_Handler":{"id":"guides/Guide_05_Lifespan_Handler","title":"Lifespan Events","description":"Did you know that you can define some special code that runs before and","sidebar":"tutorialSidebar"},"guides/Guide_06_Benchmarking_FastKafka":{"id":"guides/Guide_06_Benchmarking_FastKafka","title":"Benchmarking FastKafka app","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka":{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","title":"Encoding and Decoding Kafka Messages with 
FastKafka","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_11_Consumes_Basics":{"id":"guides/Guide_11_Consumes_Basics","title":"@consumes basics","description":"You can use @consumes decorator to consume messages from Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_12_Batch_Consuming":{"id":"guides/Guide_12_Batch_Consuming","title":"Batch consuming","description":"If you want to consume data in batches @consumes decorator makes that","sidebar":"tutorialSidebar"},"guides/Guide_21_Produces_Basics":{"id":"guides/Guide_21_Produces_Basics","title":"@produces basics","description":"You can use @produces decorator to produce messages to Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_22_Partition_Keys":{"id":"guides/Guide_22_Partition_Keys","title":"Defining a partition key","description":"Partition keys are used in Apache Kafka to determine which partition a","sidebar":"tutorialSidebar"},"guides/Guide_23_Batch_Producing":{"id":"guides/Guide_23_Batch_Producing","title":"Batch producing","description":"If you want to send your data in batches @produces decorator makes","sidebar":"tutorialSidebar"},"guides/Guide_24_Using_Multiple_Kafka_Clusters":{"id":"guides/Guide_24_Using_Multiple_Kafka_Clusters","title":"Using multiple Kafka clusters","description":"Ready to take your FastKafka app to the next level? 
This guide shows you","sidebar":"tutorialSidebar"},"guides/Guide_30_Using_docker_to_deploy_fastkafka":{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","title":"Deploying FastKafka using Docker","description":"Building a Docker Image","sidebar":"tutorialSidebar"},"guides/Guide_31_Using_redpanda_to_test_fastkafka":{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","title":"Using Redpanda to test FastKafka","description":"What is FastKafka?","sidebar":"tutorialSidebar"},"guides/Guide_32_Using_fastapi_to_run_fastkafka_application":{"id":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application","title":"Using FastAPI to Run FastKafka Application","description":"When deploying a FastKafka application, the default approach is to","sidebar":"tutorialSidebar"},"index":{"id":"index","title":"FastKafka","description":"Effortless Kafka integration for your web services","sidebar":"tutorialSidebar"},"LICENSE":{"id":"LICENSE","title":"LICENSE","description":"Apache License","sidebar":"tutorialSidebar"}}}')}}]); \ No newline at end of file diff --git a/assets/js/c9eeccbf.fa9d425c.js b/assets/js/c9eeccbf.fa9d425c.js new file mode 100644 index 0000000..d68a5f4 --- /dev/null +++ b/assets/js/c9eeccbf.fa9d425c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3747],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>f});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function s(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?r(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function i(e,t){if(null==e)return{};var n,o,a=function(e,t){if(null==e)return{};var n,o,a={},r=Object.keys(e);for(o=0;o<r.length;o++)n=r[o],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o<r.length;o++)n=r[o],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var l=o.createContext({}),c=function(e){var t=o.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},u=function(e){var t=c(e.components);return o.createElement(l.Provider,{value:t},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},d=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,r=e.originalType,l=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),p=c(n),d=a,f=p["".concat(l,".").concat(d)]||p[d]||m[d]||r;return n?o.createElement(f,s(s({ref:t},u),{},{components:n})):o.createElement(f,s({ref:t},u))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,s=new Array(r);s[0]=d;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[p]="string"==typeof e?e:a,s[1]=i;for(var c=2;c<r;c++)s[c]=n[c];return o.createElement.apply(null,s)}return o.createElement.apply(null,n)}d.displayName="MDXCreateElement"},9709:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>m,frontMatter:()=>r,metadata:()=>i,toc:()=>c});var o=n(7462),a=(n(7294),n(3905));const r={},s="Batch consuming",i={unversionedId:"guides/Guide_12_Batch_Consuming",id:"version-0.7.1/guides/Guide_12_Batch_Consuming",title:"Batch consuming",description:"If you 
want to consume data in batches @consumes decorator makes that",source:"@site/versioned_docs/version-0.7.1/guides/Guide_12_Batch_Consuming.md",sourceDirName:"guides",slug:"/guides/Guide_12_Batch_Consuming",permalink:"/docs/0.7.1/guides/Guide_12_Batch_Consuming",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@consumes basics",permalink:"/docs/0.7.1/guides/Guide_11_Consumes_Basics"},next:{title:"@produces basics",permalink:"/docs/0.7.1/guides/Guide_21_Produces_Basics"}},l={},c=[{value:"Consume function with batching",id:"consume-function-with-batching",level:2},{value:"App example",id:"app-example",level:2},{value:"Send the messages to kafka topic",id:"send-the-messages-to-kafka-topic",level:2}],u={toc:c},p="wrapper";function m(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,o.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"batch-consuming"},"Batch consuming"),(0,a.kt)("p",null,"If you want to consume data in batches ",(0,a.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator makes that\npossible for you. By typing a consumed msg object as a ",(0,a.kt)("inlineCode",{parentName:"p"},"list")," of\nmessages the consumer will call your consuming function with a batch of\nmessages consumed from a single partition. 
Let\u2019s demonstrate that now."),(0,a.kt)("h2",{id:"consume-function-with-batching"},"Consume function with batching"),(0,a.kt)("p",null,"To consume messages in batches, you need to wrap you message type into a\nlist and the ",(0,a.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will take care of the rest for you.\nYour consumes function will be called with batches grouped by partition\nnow."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(auto_offset_reset="earliest")\nasync def on_hello_world(msg: List[HelloWorld]):\n logger.info(f"Got msg batch: {msg}")\n')),(0,a.kt)("h2",{id:"app-example"},"App example"),(0,a.kt)("p",null,"We will modify the app example from ",(0,a.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_11_Consumes_Basics"},"@consumes\nbasics")," guide to consume\n",(0,a.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages batch. The final app will look like this (make\nsure you replace the ",(0,a.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,a.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom typing import List\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(auto_offset_reset="earliest")\nasync def on_hello_world(msg: List[HelloWorld]):\n logger.info(f"Got msg batch: 
{msg}")\n')),(0,a.kt)("h2",{id:"send-the-messages-to-kafka-topic"},"Send the messages to kafka topic"),(0,a.kt)("p",null,"Lets send a couple of ",(0,a.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages to the ",(0,a.kt)("em",{parentName:"p"},"hello_world")," topic\nand check if our consumer kafka application has logged the received\nmessages batch. In your terminal, run the following command at least two\ntimes to create multiple messages in your kafka queue:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell"},'echo {\\"msg\\": \\"Hello world\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,a.kt)("p",null,"Now we can run the app. Copy the code of the example app in\nconsumer_example.py and run it by running"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,a.kt)("p",null,"You should see the your Kafka messages being logged in batches by your\nconsumer."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/ca2bf8a3.b136f6a8.js b/assets/js/ca2bf8a3.b136f6a8.js new file mode 100644 index 0000000..0bf08b8 --- /dev/null +++ b/assets/js/ca2bf8a3.b136f6a8.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6704],{3769:a=>{a.exports=JSON.parse('{"name":"docusaurus-plugin-content-docs","id":"default"}')}}]); \ No newline at end of file diff --git a/assets/js/ca36df4d.bdcd738d.js b/assets/js/ca36df4d.bdcd738d.js new file mode 100644 index 0000000..8470d36 --- /dev/null +++ b/assets/js/ca36df4d.bdcd738d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[917],{3905:(t,a,e)=>{e.d(a,{Zo:()=>u,kt:()=>f});var i=e(7294);function r(t,a,e){return a in 
t?Object.defineProperty(t,a,{value:e,enumerable:!0,configurable:!0,writable:!0}):t[a]=e,t}function n(t,a){var e=Object.keys(t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(t);a&&(i=i.filter((function(a){return Object.getOwnPropertyDescriptor(t,a).enumerable}))),e.push.apply(e,i)}return e}function s(t){for(var a=1;a<arguments.length;a++){var e=null!=arguments[a]?arguments[a]:{};a%2?n(Object(e),!0).forEach((function(a){r(t,a,e[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(e)):n(Object(e)).forEach((function(a){Object.defineProperty(t,a,Object.getOwnPropertyDescriptor(e,a))}))}return t}function p(t,a){if(null==t)return{};var e,i,r=function(t,a){if(null==t)return{};var e,i,r={},n=Object.keys(t);for(i=0;i<n.length;i++)e=n[i],a.indexOf(e)>=0||(r[e]=t[e]);return r}(t,a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);for(i=0;i<n.length;i++)e=n[i],a.indexOf(e)>=0||Object.prototype.propertyIsEnumerable.call(t,e)&&(r[e]=t[e])}return r}var l=i.createContext({}),k=function(t){var a=i.useContext(l),e=a;return t&&(e="function"==typeof t?t(a):s(s({},a),t)),e},u=function(t){var a=k(t.components);return i.createElement(l.Provider,{value:a},t.children)},m="mdxType",o={inlineCode:"code",wrapper:function(t){var a=t.children;return i.createElement(i.Fragment,{},a)}},h=i.forwardRef((function(t,a){var e=t.components,r=t.mdxType,n=t.originalType,l=t.parentName,u=p(t,["components","mdxType","originalType","parentName"]),m=k(e),h=r,f=m["".concat(l,".").concat(h)]||m[h]||o[h]||n;return e?i.createElement(f,s(s({ref:a},u),{},{components:e})):i.createElement(f,s({ref:a},u))}));function f(t,a){var e=arguments,r=a&&a.mdxType;if("string"==typeof t||r){var n=e.length,s=new Array(n);s[0]=h;var p={};for(var l in a)hasOwnProperty.call(a,l)&&(p[l]=a[l]);p.originalType=t,p[m]="string"==typeof t?t:r,s[1]=p;for(var k=2;k<n;k++)s[k]=e[k];return i.createElement.apply(null,s)}return 
i.createElement.apply(null,e)}h.displayName="MDXCreateElement"},2197:(t,a,e)=>{e.r(a),e.d(a,{assets:()=>l,contentTitle:()=>s,default:()=>o,frontMatter:()=>n,metadata:()=>p,toc:()=>k});var i=e(7462),r=(e(7294),e(3905));const n={},s="Release notes",p={unversionedId:"CHANGELOG",id:"CHANGELOG",title:"Release notes",description:"0.8.0",source:"@site/docs/CHANGELOG.md",sourceDirName:".",slug:"/CHANGELOG",permalink:"/docs/next/CHANGELOG",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Contributing to FastKafka",permalink:"/docs/next/CONTRIBUTING"}},l={},k=[{value:"0.8.0",id:"080",level:2},{value:"New Features",id:"new-features",level:3},{value:"Bugs Squashed",id:"bugs-squashed",level:3},{value:"0.7.1",id:"071",level:2},{value:"Bugs Squashed",id:"bugs-squashed-1",level:3},{value:"0.7.0",id:"070",level:2},{value:"New Features",id:"new-features-1",level:3},{value:"Bugs Squashed",id:"bugs-squashed-2",level:3},{value:"0.6.0",id:"060",level:2},{value:"New Features",id:"new-features-2",level:3},{value:"Bugs Squashed",id:"bugs-squashed-3",level:3},{value:"0.5.0",id:"050",level:2},{value:"New Features",id:"new-features-3",level:3},{value:"Bugs Squashed",id:"bugs-squashed-4",level:3},{value:"0.4.0",id:"040",level:2},{value:"New Features",id:"new-features-4",level:3},{value:"0.3.1",id:"031",level:2},{value:"0.3.0",id:"030",level:2},{value:"New Features",id:"new-features-5",level:3},{value:"Bugs Squashed",id:"bugs-squashed-5",level:3},{value:"0.2.3",id:"023",level:2},{value:"0.2.2",id:"022",level:2},{value:"New Features",id:"new-features-6",level:3},{value:"Bugs Squashed",id:"bugs-squashed-6",level:3},{value:"0.2.0",id:"020",level:2},{value:"New Features",id:"new-features-7",level:3},{value:"Bugs Squashed",id:"bugs-squashed-7",level:3},{value:"0.1.3",id:"013",level:2},{value:"0.1.2",id:"012",level:2},{value:"New Features",id:"new-features-8",level:3},{value:"Bugs 
Squashed",id:"bugs-squashed-8",level:3},{value:"0.1.1",id:"011",level:2},{value:"Bugs Squashed",id:"bugs-squashed-9",level:3},{value:"0.1.0",id:"010",level:2}],u={toc:k},m="wrapper";function o(t){let{components:a,...e}=t;return(0,r.kt)(m,(0,i.Z)({},u,e,{components:a,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"release-notes"},"Release notes"),(0,r.kt)("h2",{id:"080"},"0.8.0"),(0,r.kt)("h3",{id:"new-features"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Add support for Pydantic v2 (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/408"},"#408"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"FastKafka now uses Pydantic v2 for serialization/deserialization of messages"))),(0,r.kt)("li",{parentName:"ul"},"Enable nbdev_test on windows and run CI tests on windows (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/356"},"#356"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("h3",{id:"bugs-squashed"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix \xb4fastkafka testing install deps\xb4 failing (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/385"},"#385"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Create asyncapi docs directory only while building asyncapi docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/368"},"#368"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add retries to producer in case of raised KafkaTimeoutError exception 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/423"},"#423"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("h2",{id:"071"},"0.7.1"),(0,r.kt)("h3",{id:"bugs-squashed-1"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Limit pydantic version to <2.0 (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/427"},"#427"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix Kafka broker version installation issues (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/427"},"#427"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix ApacheKafkaBroker startup issues (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/427"},"#427"),")"))),(0,r.kt)("h2",{id:"070"},"0.7.0"),(0,r.kt)("h3",{id:"new-features-1"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optional description argument to consumes and produces decorator implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/338"},"#338"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Consumes and produces decorators now have optional ",(0,r.kt)("inlineCode",{parentName:"li"},"description")," argument that is used instead of function docstring in async doc generation when specified"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"FastKafka Windows OS support enabled (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/326"},"#326"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"FastKafka can now run on 
Windows"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"FastKafka and FastAPI integration implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/304"},"#304"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"FastKafka can now be run alongside FastAPI"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Batch consuming option to consumers implemented (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/298"},"#298"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Consumers can consume events in batches by specifying msg type of consuming function as ",(0,r.kt)("inlineCode",{parentName:"li"},"List[YourMsgType]")," "))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed support for synchronous produce functions (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/295"},"#295"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Added default broker values and update docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/292"},"#292"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("h3",{id:"bugs-squashed-2"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix index.ipynb to be runnable in colab (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/342"},"#342"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Use cli option root_path docs generate and serve CLI commands 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/341"},"#341"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix incorrect asyncapi docs path on fastkafka docs serve command (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/335"},"#335"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Serve docs now takes app ",(0,r.kt)("inlineCode",{parentName:"li"},"root_path")," argument into consideration when specified in app"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/315"},"#315"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix logs printing timestamps (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/308"},"#308"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix topics with dots causing failure of tester instantiation (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/306"},"#306"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},'Specified topics can now have "." 
in their names')))),(0,r.kt)("h2",{id:"060"},"0.6.0"),(0,r.kt)("h3",{id:"new-features-2"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Timestamps added to CLI commands (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/283"},"#283"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Added option to process messages concurrently (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/278"},"#278"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"A new ",(0,r.kt)("inlineCode",{parentName:"li"},"executor")," option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add consumes and produces functions to app (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/274"},"#274"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add batching for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/273"},"#273"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(batch): batch support is a real need! and i see it on the issue list.... 
so hope we do not need to wait too long"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken links in guides (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/272"},"#272"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate the docusaurus sidebar dynamically by parsing summary.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/270"},"#270"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Metadata passed to consumer (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/269"},"#269"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirement(key): read the key value somehow..Maybe I missed something in the docs\nrequirement(header): read header values, Reason: I use CDC | Debezium and in the current system the header values are important to differentiate between the CRUD operations."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("a",{parentName:"p",href:"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"},"https://discord.com/channels/1085457301214855171/1090956337938182266/1098592795557630063"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Contribution with instructions how to build and test added 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/255"},"#255"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Export encoders, decoders from fastkafka.encoder (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/246"},"#246"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/239"},"#239"),")")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"UI Improvement: Post screenshots with links to the actual messages in testimonials section (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/228"},"#228"),")")),(0,r.kt)("h3",{id:"bugs-squashed-3"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Batch testing fix (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/280"},"#280"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Tester breaks when using Batching or KafkaEvent producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/279"},"#279"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Consumer loop callbacks are not executing in parallel (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/276"},"#276"),")"))),(0,r.kt)("h2",{id:"050"},"0.5.0"),(0,r.kt)("h3",{id:"new-features-3"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Significant speedup of Kafka producer 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/236"},"#236"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Added support for AVRO encoding/decoding (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/pull/231"},"#231"),"), thanks to ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("h3",{id:"bugs-squashed-4"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed sidebar to include guides in docusaurus documentation (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/238"},"#238"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fixed link to symbols in docusaurus docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/227"},"#227"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Removed bootstrap servers from constructor (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/220"},"#220"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl")))),(0,r.kt)("h2",{id:"040"},"0.4.0"),(0,r.kt)("h3",{id:"new-features-4"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Integrate FastKafka chat (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/208"},"#208"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add benchmarking 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/206"},"#206"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Enable fast testing without running kafka locally (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/198"},"#198"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Generate docs using Docusaurus (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/194"},"#194"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/harishmohanraj"},"@harishmohanraj"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add test cases for LocalRedpandaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/189"},"#189"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Reimplement patch and delegates from fastcore (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/188"},"#188"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Rename existing functions into start and stop and add lifespan handler 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/117"},"#117"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"},"https://www.linkedin.com/posts/tiangolo_fastapi-activity-7038907638331404288-Oar3/?utm_source=share&utm_medium=member_ios"))))),(0,r.kt)("h2",{id:"031"},"0.3.1"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"README.md file updated")),(0,r.kt)("h2",{id:"030"},"0.3.0"),(0,r.kt)("h3",{id:"new-features-5"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Guide for FastKafka produces using partition key (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/172"},"#172"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #161"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add support for Redpanda for testing and deployment (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/181"},"#181"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kumaranvpl"},"@kumaranvpl"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Remove bootstrap_servers from ",(0,r.kt)("strong",{parentName:"p"},"init")," and use the name of broker as an option when running/testing (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/134"},"#134"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add a GH action file to check for broken links in the docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/163"},"#163"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Optimize requirements for testing and docs 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/151"},"#151"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Break requirements into base and optional for testing and dev (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/124"},"#124"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Minimize base requirements needed just for running the service."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add link to example git repo into guide for building docs using actions (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/81"},"#81"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Add logging for run_in_background (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/46"},"#46"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement partition Key mechanism for producers (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/16"},"#16"),")"))),(0,r.kt)("h3",{id:"bugs-squashed-5"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Implement checks for npm installation and version (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/176"},"#176"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Sternakt"},"@Sternakt")),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Closes #158 by checking if the npx is installed and more verbose error handling"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix the helper.py link in CHANGELOG.md (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/165"},"#165"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka docs install_deps fails 
(",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/157"},"#157"),")"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Unexpected internal error: ","[Errno 2]"," No such file or directory: 'npx'"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Broken links in docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/141"},"#141"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"fastkafka run is not showing up in CLI docs (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/132"},"#132"),")"))),(0,r.kt)("h2",{id:"023"},"0.2.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fixed broken links on PyPi index page")),(0,r.kt)("h2",{id:"022"},"0.2.2"),(0,r.kt)("h3",{id:"new-features-6"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Extract JDK and Kafka installation out of LocalKafkaBroker (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/131"},"#131"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"PyYAML version relaxed (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/pull/119"},"#119"),"), thanks to ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/davorrunje"},"@davorrunje"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Replace docker based kafka with local (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/68"},"#68"),")"),(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace docker compose with a simple docker run (standard run_jupyter.sh should 
do)"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","replace all tests to use LocalKafkaBroker"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","update documentation")))),(0,r.kt)("h3",{id:"bugs-squashed-6"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix broken link for FastKafka docs in index notebook (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/145"},"#145"),")")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Fix encoding issues when loading setup.py on windows OS (",(0,r.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/issues/135"},"#135"),")"))),(0,r.kt)("h2",{id:"020"},"0.2.0"),(0,r.kt)("h3",{id:"new-features-7"},"New Features"),(0,r.kt)("ul",{className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul"},"Replace kafka container with LocalKafkaBroker (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/112"},"#112"),")",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Replace kafka container with LocalKafkaBroker in tests"))))),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Remove kafka container from tests environment"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Fix failing tests")),(0,r.kt)("h3",{id:"bugs-squashed-7"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Fix random failing in CI 
(",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/109"},"#109"),")")),(0,r.kt)("h2",{id:"013"},"0.1.3"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"version update in ",(0,r.kt)("strong",{parentName:"li"},"init"),".py")),(0,r.kt)("h2",{id:"012"},"0.1.2"),(0,r.kt)("h3",{id:"new-features-8"},"New Features"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Git workflow action for publishing Kafka docs (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/78"},"#78"),")")),(0,r.kt)("h3",{id:"bugs-squashed-8"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Include missing requirement (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/110"},"#110"),")",(0,r.kt)("ul",{parentName:"li",className:"contains-task-list"},(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Typer is imported in this ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/helpers.py"},"file")," but it is not included in ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/settings.ini"},"settings.ini")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add aiohttp which is imported in this ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_helpers.py"},"file")),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbformat which is imported in _components/helpers.py"),(0,r.kt)("li",{parentName:"ul",className:"task-list-item"},(0,r.kt)("input",{parentName:"li",type:"checkbox",checked:!0,disabled:!0})," ","Add nbconvert which is imported in 
_components/helpers.py")))),(0,r.kt)("h2",{id:"011"},"0.1.1"),(0,r.kt)("h3",{id:"bugs-squashed-9"},"Bugs Squashed"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"JDK install fails on Python 3.8 (",(0,r.kt)("a",{parentName:"li",href:"https://github.com/airtai/fastkafka/issues/106"},"#106"),")")),(0,r.kt)("h2",{id:"010"},"0.1.0"),(0,r.kt)("p",null,"Initial release"))}o.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/cac45e38.ffd6c350.js b/assets/js/cac45e38.ffd6c350.js new file mode 100644 index 0000000..f2af9bc --- /dev/null +++ b/assets/js/cac45e38.ffd6c350.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3111],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?o(Object(r),!0).forEach((function(t){a(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):o(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function i(e,t){if(null==e)return{};var r,n,a=function(e,t){if(null==e)return{};var r,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},c=function(e){var t=p(e.components);return 
n.createElement(l.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),f=p(r),k=a,m=f["".concat(l,".").concat(k)]||f[k]||u[k]||o;return r?n.createElement(m,s(s({ref:t},c),{},{components:r})):n.createElement(m,s({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[f]="string"==typeof e?e:a,s[1]=i;for(var p=2;p<o;p++)s[p]=r[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,r)}k.displayName="MDXCreateElement"},3570:(e,t,r)=>{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=r(7462),a=(r(7294),r(3905));const o={},s="run_fastkafka_server_process",i={unversionedId:"cli/run_fastkafka_server_process",id:"version-0.5.0/cli/run_fastkafka_server_process",title:"run_fastkafka_server_process",description:"Usage:",source:"@site/versioned_docs/version-0.5.0/cli/run_fastkafka_server_process.md",sourceDirName:"cli",slug:"/cli/run_fastkafka_server_process",permalink:"/docs/0.5.0/cli/run_fastkafka_server_process",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"fastkafka",permalink:"/docs/0.5.0/cli/fastkafka"},next:{title:"Release notes",permalink:"/docs/0.5.0/CHANGELOG"}},l={},p=[],c={toc:p},f="wrapper";function 
u(e){let{components:t,...r}=e;return(0,a.kt)(f,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"run_fastkafka_server_process"},(0,a.kt)("inlineCode",{parentName:"h1"},"run_fastkafka_server_process")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Usage"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-console"},"$ run_fastkafka_server_process [OPTIONS] APP\n")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,a.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,a.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,a.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Options"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class. 
","[required]"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/cd19d898.67faad21.js b/assets/js/cd19d898.67faad21.js new file mode 100644 index 0000000..7e5d6c1 --- /dev/null +++ b/assets/js/cd19d898.67faad21.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8565],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>d});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){l(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function o(e,t){if(null==e)return{};var a,n,l=function(e,t){if(null==e)return{};var a,n,l={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return 
e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},k=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,k=o(e,["components","mdxType","originalType","parentName"]),f=p(a),m=l,d=f["".concat(i,".").concat(m)]||f[m]||u[m]||r;return a?n.createElement(d,s(s({ref:t},k),{},{components:a})):n.createElement(d,s({ref:t},k))}));function d(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,s=new Array(r);s[0]=m;var o={};for(var i in t)hasOwnProperty.call(t,i)&&(o[i]=t[i]);o.originalType=e,o[f]="string"==typeof e?e:l,s[1]=o;for(var p=2;p<r;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},2386:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const r={},s="fastkafka",o={unversionedId:"cli/fastkafka",id:"version-0.5.0/cli/fastkafka",title:"fastkafka",description:"Usage:",source:"@site/versioned_docs/version-0.5.0/cli/fastkafka.md",sourceDirName:"cli",slug:"/cli/fastkafka",permalink:"/docs/0.5.0/cli/fastkafka",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Tester",permalink:"/docs/0.5.0/api/fastkafka/testing/Tester"},next:{title:"run_fastkafka_server_process",permalink:"/docs/0.5.0/cli/run_fastkafka_server_process"}},i={},p=[{value:"<code>fastkafka docs</code>",id:"fastkafka-docs",level:2},{value:"<code>fastkafka docs generate</code>",id:"fastkafka-docs-generate",level:3},{value:"<code>fastkafka docs install_deps</code>",id:"fastkafka-docs-install_deps",level:3},{value:"<code>fastkafka docs serve</code>",id:"fastkafka-docs-serve",level:3},{value:"<code>fastkafka 
run</code>",id:"fastkafka-run",level:2},{value:"<code>fastkafka testing</code>",id:"fastkafka-testing",level:2},{value:"<code>fastkafka testing install_deps</code>",id:"fastkafka-testing-install_deps",level:3}],k={toc:p},f="wrapper";function u(e){let{components:t,...a}=e;return(0,l.kt)(f,(0,n.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h1",{id:"fastkafka"},(0,l.kt)("inlineCode",{parentName:"h1"},"fastkafka")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"docs"),": Commands for managing fastkafka app..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"run"),": Runs Fast Kafka API application"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"testing"),": Commands for managing fastkafka testing")),(0,l.kt)("h2",{id:"fastkafka-docs"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka docs")),(0,l.kt)("p",null,"Commands for managing fastkafka app documentation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs [OPTIONS] COMMAND 
[ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"generate"),": Generates documentation for a FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"serve"),": Generates and serves documentation for a...")),(0,l.kt)("h3",{id:"fastkafka-docs-generate"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs generate")),(0,l.kt)("p",null,"Generates documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs generate [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". 
","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created ","[default: .]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka documentation generation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-serve"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs serve")),(0,l.kt)("p",null,"Generates and serves documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs serve [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". 
","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created ","[default: .]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--bind TEXT"),": Some info ","[default: 127.0.0.1]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--port INTEGER"),": Some info ","[default: 8000]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-run"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka run")),(0,l.kt)("p",null,"Runs Fast Kafka API application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka run [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--num-workers INTEGER"),": Number of FastKafka instances to run, defaults to number of CPU cores. ","[default: 8]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class. 
","[required]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-testing"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka testing")),(0,l.kt)("p",null,"Commands for managing fastkafka testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka app...")),(0,l.kt)("h3",{id:"fastkafka-testing-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka testing install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka app testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/cd59f9ef.54f4605b.js b/assets/js/cd59f9ef.54f4605b.js new file mode 100644 index 0000000..6c51e42 --- /dev/null +++ b/assets/js/cd59f9ef.54f4605b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1753],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>k});var a=n(7294);function i(e,t,n){return t in 
e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){i(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function r(e,t){if(null==e)return{};var n,a,i=function(e,t){if(null==e)return{};var n,a,i={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var s=a.createContext({}),d=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=d(e.components);return a.createElement(s.Provider,{value:t},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,o=e.originalType,s=e.parentName,p=r(e,["components","mdxType","originalType","parentName"]),m=d(n),u=i,k=m["".concat(s,".").concat(u)]||m[u]||c[u]||o;return n?a.createElement(k,l(l({ref:t},p),{},{components:n})):a.createElement(k,l({ref:t},p))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,l=new Array(o);l[0]=u;var r={};for(var s in t)hasOwnProperty.call(t,s)&&(r[s]=t[s]);r.originalType=e,r[m]="string"==typeof e?e:i,l[1]=r;for(var d=2;d<o;d++)l[d]=n[d];return a.createElement.apply(null,l)}return 
a.createElement.apply(null,n)}u.displayName="MDXCreateElement"},7411:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>c,frontMatter:()=>o,metadata:()=>r,toc:()=>d});var a=n(7462),i=(n(7294),n(3905));const o={},l=void 0,r={unversionedId:"api/fastkafka/testing/Tester",id:"version-0.6.0/api/fastkafka/testing/Tester",title:"Tester",description:"fastkafka.testing.Tester {fastkafka.testing.Tester}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/testing/Tester.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/Tester",permalink:"/docs/0.6.0/api/fastkafka/testing/Tester",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LocalRedpandaBroker",permalink:"/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker"},next:{title:"fastkafka",permalink:"/docs/0.6.0/cli/fastkafka"}},s={},d=[{value:"<code>fastkafka.testing.Tester</code>",id:"fastkafka.testing.Tester",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>benchmark</code>",id:"benchmark",level:3},{value:"<code>consumes</code>",id:"consumes",level:3},{value:"<code>create_mocks</code>",id:"create_mocks",level:3},{value:"<code>produces</code>",id:"produces",level:3},{value:"<code>run_in_background</code>",id:"run_in_background",level:3},{value:"<code>using_local_kafka</code>",id:"using_local_kafka",level:3},{value:"<code>using_local_redpanda</code>",id:"using_local_redpanda",level:3}],p={toc:d},m="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(m,(0,a.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.Tester")),(0,i.kt)("h3",{id:"init"},(0,i.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, 
fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None")),(0,i.kt)("p",null,"Mirror-like object for testing a FastKafka application"),(0,i.kt)("p",null,"Can be used as context manager"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,i.kt)("h3",{id:"benchmark"},(0,i.kt)("inlineCode",{parentName:"h3"},"benchmark")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]")),(0,i.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"interval"),": Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sliding_window_size"),": The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated")),(0,i.kt)("h3",{id:"consumes"},(0,i.kt)("inlineCode",{parentName:"h3"},"consumes")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]], typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], 
typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"decoder"),": Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"executor"),': Type of executor to choose for consuming tasks. Avaliable options\nare "SequentialExecutor" and "DynamicTaskExecutor". The default option is\n"SequentialExecutor" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n"DynamicTaskExecutor" which will wrap the consuming functions into tasks\nand run them in on asyncio loop in background. 
This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: "on_". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string (or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings) that the consumer should contact to bootstrap\ninitial cluster metadata.")),(0,i.kt)("p",null,"This does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ",(0,i.kt)("inlineCode",{parentName:"p"},"localhost:9092"),"."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. 
Also\nsubmitted to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~.consumer.group_coordinator.GroupCoordinator"),"\nfor logging with respect to consumer group administration. Default:\n",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-{version}")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. 
Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Client request timeout in milliseconds.\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. 
Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. 
When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. 
The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more information see\n:ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),". Default: None."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying ",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. 
Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values are:\n",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. (See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("h3",{id:"create_mocks"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_mocks")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_mocks(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,i.kt)("h3",{id:"produces"},(0,i.kt)("inlineCode",{parentName:"h3"},"produces")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fa3e2864f70>, 
key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fa3e1879090>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when delivery report for a produced message is received"),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the producer will send returned values from\nthe 
decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encoder"),": Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. It also accepts custom encoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: "to_". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. 
No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). 
Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. 
(See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"))),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": when needed")),(0,i.kt)("h3",{id:"run_in_background"},(0,i.kt)("inlineCode",{parentName:"h3"},"run_in_background")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]")),(0,i.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,i.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,i.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is triggered."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A decorator function that takes a background task as an input and stores it to be run in the backround.")),(0,i.kt)("h3",{id:"using_local_kafka"},(0,i.kt)("inlineCode",{parentName:"h3"},"using_local_kafka")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester")),(0,i.kt)("p",null,"Starts local Kafka broker used by the Tester instance"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where 
the zookeepeer instance will save data"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"An instance of tester with Kafka as broker")),(0,i.kt)("h3",{id:"using_local_redpanda"},(0,i.kt)("inlineCode",{parentName:"h3"},"using_local_redpanda")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester")),(0,i.kt)("p",null,"Starts local Redpanda broker used by the Tester instance"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"tag"),": Tag of Redpanda image to use to start container"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"seastar_core"),": Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"memory"),": The amount of memory to make available to Redpanda"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"mode"),": Mode to use to load configuration properties in container"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"default_log_level"),": Log levels to use for 
Redpanda")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"An instance of tester with Redpanda as broker")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d0381ee6.20083f4a.js b/assets/js/d0381ee6.20083f4a.js new file mode 100644 index 0000000..ceaff43 --- /dev/null +++ b/assets/js/d0381ee6.20083f4a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8120],{3905:(e,a,o)=>{o.d(a,{Zo:()=>c,kt:()=>f});var s=o(7294);function t(e,a,o){return a in e?Object.defineProperty(e,a,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[a]=o,e}function n(e,a){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);a&&(s=s.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),o.push.apply(o,s)}return o}function r(e){for(var a=1;a<arguments.length;a++){var o=null!=arguments[a]?arguments[a]:{};a%2?n(Object(o),!0).forEach((function(a){t(e,a,o[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(o)):n(Object(o)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(o,a))}))}return e}function i(e,a){if(null==e)return{};var o,s,t=function(e,a){if(null==e)return{};var o,s,t={},n=Object.keys(e);for(s=0;s<n.length;s++)o=n[s],a.indexOf(o)>=0||(t[o]=e[o]);return t}(e,a);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);for(s=0;s<n.length;s++)o=n[s],a.indexOf(o)>=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(t[o]=e[o])}return t}var p=s.createContext({}),k=function(e){var a=s.useContext(p),o=a;return e&&(o="function"==typeof e?e(a):r(r({},a),e)),o},c=function(e){var a=k(e.components);return s.createElement(p.Provider,{value:a},e.children)},_="mdxType",l={inlineCode:"code",wrapper:function(e){var a=e.children;return s.createElement(s.Fragment,{},a)}},m=s.forwardRef((function(e,a){var 
o=e.components,t=e.mdxType,n=e.originalType,p=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),_=k(o),m=t,f=_["".concat(p,".").concat(m)]||_[m]||l[m]||n;return o?s.createElement(f,r(r({ref:a},c),{},{components:o})):s.createElement(f,r({ref:a},c))}));function f(e,a){var o=arguments,t=a&&a.mdxType;if("string"==typeof e||t){var n=o.length,r=new Array(n);r[0]=m;var i={};for(var p in a)hasOwnProperty.call(a,p)&&(i[p]=a[p]);i.originalType=e,i[_]="string"==typeof e?e:t,r[1]=i;for(var k=2;k<n;k++)r[k]=o[k];return s.createElement.apply(null,r)}return s.createElement.apply(null,o)}m.displayName="MDXCreateElement"},3347:(e,a,o)=>{o.r(a),o.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>l,frontMatter:()=>n,metadata:()=>i,toc:()=>k});var s=o(7462),t=(o(7294),o(3905));const n={},r="Using multiple Kafka clusters",i={unversionedId:"guides/Guide_24_Using_Multiple_Kafka_Clusters",id:"version-0.8.0/guides/Guide_24_Using_Multiple_Kafka_Clusters",title:"Using multiple Kafka clusters",description:"Ready to take your FastKafka app to the next level? 
This guide shows you",source:"@site/versioned_docs/version-0.8.0/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",sourceDirName:"guides",slug:"/guides/Guide_24_Using_Multiple_Kafka_Clusters",permalink:"/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},next:{title:"Using Redpanda to test FastKafka",permalink:"/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka"}},p={},k=[{value:"Test message",id:"test-message",level:3},{value:"Defining multiple broker configurations",id:"defining-multiple-broker-configurations",level:2},{value:"How it works",id:"how-it-works",level:4},{value:"Testing the application",id:"testing-the-application",level:2},{value:"Running the application",id:"running-the-application",level:2},{value:"Application documentation",id:"application-documentation",level:2},{value:"Examples on how to use multiple broker configurations",id:"examples-on-how-to-use-multiple-broker-configurations",level:2},{value:"Example #1",id:"example-1",level:3},{value:"Testing",id:"testing",level:4},{value:"Example #2",id:"example-2",level:3},{value:"Testing",id:"testing-1",level:4},{value:"Example #3",id:"example-3",level:3},{value:"Testing",id:"testing-2",level:4}],c={toc:k},_="wrapper";function l(e){let{components:a,...o}=e;return(0,t.kt)(_,(0,s.Z)({},c,o,{components:a,mdxType:"MDXLayout"}),(0,t.kt)("h1",{id:"using-multiple-kafka-clusters"},"Using multiple Kafka clusters"),(0,t.kt)("p",null,"Ready to take your FastKafka app to the next level? This guide shows you\nhow to connect to multiple Kafka clusters effortlessly. Consolidate\ntopics and produce messages across clusters like a pro. Unleash the full\npotential of your Kafka-powered app with FastKafka. 
Let\u2019s dive in and\nelevate your application\u2019s capabilities!"),(0,t.kt)("h3",{id:"test-message"},"Test message"),(0,t.kt)("p",null,"To showcase the functionalities of FastKafka and illustrate the concepts\ndiscussed, we can use a simple test message called ",(0,t.kt)("inlineCode",{parentName:"p"},"TestMsg"),". Here\u2019s the\ndefinition of the ",(0,t.kt)("inlineCode",{parentName:"p"},"TestMsg")," class:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},"class TestMsg(BaseModel):\n msg: str = Field(...)\n")),(0,t.kt)("h2",{id:"defining-multiple-broker-configurations"},"Defining multiple broker configurations"),(0,t.kt)("p",null,"When building a FastKafka application, you may need to consume messages\nfrom multiple Kafka clusters, each with its own set of broker\nconfigurations. FastKafka provides the flexibility to define different\nbroker clusters using the brokers argument in the consumes decorator.\nLet\u2019s explore an example code snippet"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\n\nkafka_brokers_1 = dict(\n development=dict(url="dev.server_1", port=9092),\n production=dict(url="prod.server_1", port=9092),\n)\nkafka_brokers_2 = dict(\n development=dict(url="dev.server_2", port=9092),\n production=dict(url="prod.server_1", port=9092),\n)\n\napp = FastKafka(kafka_brokers=kafka_brokers_1, bootstrap_servers_id="development")\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n print(f"Received on s1: {msg=}")\n await to_predictions_1(msg)\n\n\n@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n print(f"Received on s2: {msg=}")\n await to_predictions_2(msg)\n\n\n@app.produces(topic="predictions")\nasync def to_predictions_1(msg: 
TestMsg) -> TestMsg:\n return msg\n\n\n@app.produces(topic="predictions", brokers=kafka_brokers_2)\nasync def to_predictions_2(msg: TestMsg) -> TestMsg:\n return msg\n')),(0,t.kt)("p",null,"In this example, the application has two consumes endpoints, both of\nwhich will consume events from ",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals")," topic.\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_1")," will consume events from ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"\nconfiguration and ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2")," will consume events from\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," configuration. When producing, ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1")," will\nproduce to ",(0,t.kt)("inlineCode",{parentName:"p"},"predictions")," topic on ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," cluster and\n",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2")," will produce to ",(0,t.kt)("inlineCode",{parentName:"p"},"predictions")," topic on\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," cluster."),(0,t.kt)("h4",{id:"how-it-works"},"How it works"),(0,t.kt)("p",null,"The ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," configuration represents the primary cluster,\nwhile ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," serves as an alternative cluster specified in\nthe decorator."),(0,t.kt)("p",null,"Using the FastKafka class, the app object is initialized with the\nprimary broker configuration (",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"). 
By default, the\n",(0,t.kt)("inlineCode",{parentName:"p"},"@app.consumes")," decorator without the brokers argument consumes messages\nfrom the ",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals")," topic on ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1"),"."),(0,t.kt)("p",null,"To consume messages from a different cluster, the ",(0,t.kt)("inlineCode",{parentName:"p"},"@app.consumes"),"\ndecorator includes the ",(0,t.kt)("inlineCode",{parentName:"p"},"brokers")," argument. This allows explicit\nspecification of the broker cluster in the ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2"),"\nfunction, enabling consumption from the same topic but using the\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," configuration."),(0,t.kt)("p",null,"The brokers argument can also be used in the @app.produces decorator to\ndefine multiple broker clusters for message production."),(0,t.kt)("p",null,"It\u2019s important to ensure that all broker configurations have the same\nrequired settings as the primary cluster to ensure consistent behavior."),(0,t.kt)("h2",{id:"testing-the-application"},"Testing the application"),(0,t.kt)("p",null,"To test our FastKafka \u2018mirroring\u2019 application, we can use our testing\nframework. 
Lets take a look how it\u2019s done:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n # Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from\n await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal_s1"))\n # Assert on_preprocessed_signals_1 consumed sent message\n await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(\n TestMsg(msg="signal_s1"), timeout=5\n )\n # Assert app has produced a prediction\n await tester.mirrors[app.to_predictions_1].assert_called_with(\n TestMsg(msg="signal_s1"), timeout=5\n )\n\n # Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from\n await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal_s2"))\n # Assert on_preprocessed_signals_2 consumed sent message\n await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(\n TestMsg(msg="signal_s2"), timeout=5\n )\n # Assert app has produced a prediction\n await tester.mirrors[app.to_predictions_2].assert_called_with(\n TestMsg(msg="signal_s2"), timeout=5\n )\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-06-23 12:15:51.156 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-06-23 12:15:51.157 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-06-23 12:15:51.157 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n23-06-23 12:15:51.158 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:15:51.158 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n23-06-23 12:15:51.159 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() 
called()\n23-06-23 12:15:51.178 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'\n23-06-23 12:15:51.178 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:15:51.179 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'\n23-06-23 12:15:51.180 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n23-06-23 12:15:51.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:15:51.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:15:51.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:15:51.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:15:51.187 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\n23-06-23 12:15:51.187 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 
12:15:51.188 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:15:51.188 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:15:51.189 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}\n23-06-23 12:15:51.190 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:15:51.191 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}\n23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:15:51.193 [INFO] fastkafka._components.aiokafka_consumer_loop: 
aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:15:51.194 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:15:51.194 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nReceived on s1: msg=TestMsg(msg='signal_s1')\nReceived on s2: msg=TestMsg(msg='signal_s2')\n23-06-23 12:15:56.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:15:56.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:15:56.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:15:56.183 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:15:56.184 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:15:56.184 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:15:56.185 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:15:56.186 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:15:56.188 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"The usage of the ",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors")," dictionary allows specifying the\ndesired topic/broker combination for sending the test messages,\nespecially when working with multiple Kafka clusters. This ensures that\nthe data is sent to the appropriate topic/broker based on the consuming\nfunction, and consumed from appropriate topic/broker based on the\nproducing function."),(0,t.kt)("h2",{id:"running-the-application"},"Running the application"),(0,t.kt)("p",null,"You can run your application using ",(0,t.kt)("inlineCode",{parentName:"p"},"fastkafka run")," CLI command in the\nsame way that you would run a single cluster app."),(0,t.kt)("p",null,"To start your app, copy the code above in multi_cluster_example.py and\nrun it by running:"),(0,t.kt)("p",null,"Now we can run the app. 
Copy the code above in multi_cluster_example.py,\nadjust your server configurations, and run it by running"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app\n")),(0,t.kt)("p",null,"In your app logs, you should see your app starting up and your two\nconsumer functions connecting to different kafka clusters."),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24092'}\n[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24093'}\n[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})\n[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.subscription_state: Updating 
subscribed topics to: frozenset({'preprocessed_signals'})\n[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}\n[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \n[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}. \nStarting process cleanup, this may take a few seconds...\n23-06-23 12:16:18.294 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 182747...\n[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:19.471 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 182747 terminated.\n")),(0,t.kt)("h2",{id:"application-documentation"},"Application documentation"),(0,t.kt)("p",null,"At the moment the documentation for multicluster app is not yet\nimplemented, but it is under development and you can expecti it soon!"),(0,t.kt)("h2",{id:"examples-on-how-to-use-multiple-broker-configurations"},"Examples on how to use multiple broker configurations"),(0,t.kt)("h3",{id:"example-1"},"Example ","#","1"),(0,t.kt)("p",null,"In this section, we\u2019ll explore how you can effectively forward topics\nbetween different Kafka clusters, enabling seamless data 
synchronization\nfor your applications."),(0,t.kt)("p",null,"Imagine having two Kafka clusters, namely ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2"),", each hosting its own set of topics and messages. Now,\nif you want to forward a specific topic (in this case:\n",(0,t.kt)("inlineCode",{parentName:"p"},"preprocessed_signals"),") from kafka_brokers_1 to kafka_brokers_2,\nFastKafka provides an elegant solution."),(0,t.kt)("p",null,"Let\u2019s examine the code snippet that configures our application for topic\nforwarding:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_original(msg: TestMsg):\n await to_preprocessed_signals_forward(msg)\n\n\n@app.produces(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:\n return data\n')),(0,t.kt)("p",null,"Here\u2019s how it works: our FastKafka application is configured to consume\nmessages from ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_1")," and process them in the\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_original")," function. We want to forward these\nmessages to ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2"),". 
To achieve this, we define the\n",(0,t.kt)("inlineCode",{parentName:"p"},"to_preprocessed_signals_forward")," function as a producer, seamlessly\nproducing the processed messages to the preprocessed_signals topic\nwithin the ",(0,t.kt)("inlineCode",{parentName:"p"},"kafka_brokers_2")," cluster."),(0,t.kt)("h4",{id:"testing"},"Testing"),(0,t.kt)("p",null,"To test our FastKafka forwarding application, we can use our testing\nframework. Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg="signal"))\n await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-06-23 12:16:31.689 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-06-23 12:16:31.690 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-06-23 12:16:31.691 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-06-23 12:16:31.691 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:31.701 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:31.702 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:31.702 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:31.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 
'server_1:9092'}\n23-06-23 12:16:31.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:31.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:31.707 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-06-23 12:16:31.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:31.708 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:31.708 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:31.709 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:31.709 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:35.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:35.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:35.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:35.704 [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaProducer patched stop() called\n23-06-23 12:16:35.705 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:35.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:35.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"With the help of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," object, we can simulate and verify the\nbehavior of our FastKafka application. Here\u2019s how it works:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We create an instance of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," by passing in our ",(0,t.kt)("em",{parentName:"p"},"app"),"\nobject, which represents our FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the ",(0,t.kt)("strong",{parentName:"p"},"tester.mirrors")," dictionary, we can send a message to a\nspecific Kafka broker and topic combination. 
In this case, we use\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_original]"),' to send a\nTestMsg message with the content \u201csignal" to the appropriate Kafka\nbroker and topic.')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the message, we can perform assertions on the mirrored\nfunction using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)"),".\nThis assertion ensures that the mirrored function has been called\nwithin a specified timeout period (in this case, 5 seconds)."))),(0,t.kt)("h3",{id:"example-2"},"Example ","#","2"),(0,t.kt)("p",null,"In this section, we\u2019ll explore how you can effortlessly consume data\nfrom multiple sources, process it, and aggregate the results into a\nsingle topic on a specific cluster."),(0,t.kt)("p",null,"Imagine you have two Kafka clusters: ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2"),", each hosting its own set of topics and messages.\nNow, what if you want to consume data from both clusters, perform some\nprocessing, and produce the results to a single topic on\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1"),"? 
FastKafka has got you covered!"),(0,t.kt)("p",null,"Let\u2019s take a look at the code snippet that configures our application\nfor aggregating multiple clusters:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals_1(msg: TestMsg):\n print(f"Default: {msg=}")\n await to_predictions(msg)\n\n\n@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)\nasync def on_preprocessed_signals_2(msg: TestMsg):\n print(f"Specified: {msg=}")\n await to_predictions(msg)\n\n\n@app.produces(topic="predictions")\nasync def to_predictions(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction: {prediction}")\n return [prediction]\n')),(0,t.kt)("p",null,'Here\u2019s the idea: our FastKafka application is set to consume messages\nfrom the topic \u201cpreprocessed_signals" on ',(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," cluster, as\nwell as from the same topic on ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2")," cluster. We have two\nconsuming functions, ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_1")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals_2"),", that handle the messages from their\nrespective clusters. These functions perform any required processing, in\nthis case, just calling the to_predictions function."),(0,t.kt)("p",null,'The exciting part is that the to_predictions function acts as a\nproducer, sending the processed results to the \u201cpredictions" topic on\n',(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1 cluster"),". 
By doing so, we effectively aggregate the\ndata from multiple sources into a single topic on a specific cluster."),(0,t.kt)("p",null,"This approach enables you to consume data from multiple Kafka clusters,\nprocess it, and produce the aggregated results to a designated topic.\nWhether you\u2019re generating predictions, performing aggregations, or any\nother form of data processing, FastKafka empowers you to harness the\nfull potential of multiple clusters."),(0,t.kt)("h4",{id:"testing-1"},"Testing"),(0,t.kt)("p",null,"Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal"))\n await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal"))\n await tester.on_predictions.assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-06-23 12:16:41.222 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-06-23 12:16:41.223 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-06-23 12:16:41.224 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:41.224 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:41.239 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:41.239 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:41.240 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-06-23 12:16:41.240 [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaProducer patched start() called()\n23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-06-23 12:16:41.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:41.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-06-23 12:16:41.245 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:41.247 [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:41.248 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-06-23 12:16:41.248 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nDefault: msg=TestMsg(msg='signal')\nSending prediction: msg='signal'\nSpecified: msg=TestMsg(msg='signal')\nSending prediction: msg='signal'\n23-06-23 12:16:45.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:45.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:45.244 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:45.245 [INFO] 
fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:45.246 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:45.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:45.247 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"Here\u2019s how the code above works:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Within an ",(0,t.kt)("inlineCode",{parentName:"p"},"async with")," block, create an instance of the Tester by\npassing in your app object, representing your FastKafka application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the tester.mirrors dictionary, you can send messages to\nspecific Kafka broker and topic combinations. In this case, we use\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_1]")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[app.on_preprocessed_signals_2]"),' to send TestMsg\nmessages with the content \u201csignal" to the corresponding Kafka broker\nand topic combinations.')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the messages, you can perform assertions on the\n",(0,t.kt)("strong",{parentName:"p"},"on_predictions")," function using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.on_predictions.assert_called(timeout=5)"),". This assertion\nensures that the on_predictions function has been called within a\nspecified timeout period (in this case, 5 seconds)."))),(0,t.kt)("h3",{id:"example-3"},"Example ","#","3"),(0,t.kt)("p",null,"In some scenarios, you may need to produce messages to multiple Kafka\nclusters simultaneously. 
FastKafka simplifies this process by allowing\nyou to configure your application to produce messages to multiple\nclusters effortlessly. Let\u2019s explore how you can achieve this:"),(0,t.kt)("p",null,"Consider the following code snippet that demonstrates producing messages\nto multiple clusters:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\n\nclass TestMsg(BaseModel):\n msg: str = Field(...)\n\nkafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))\nkafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))\n\napp = FastKafka(kafka_brokers=kafka_brokers_1)\n\n\n@app.consumes(topic="preprocessed_signals")\nasync def on_preprocessed_signals(msg: TestMsg):\n print(f"{msg=}")\n await to_predictions_1(TestMsg(msg="prediction"))\n await to_predictions_2(TestMsg(msg="prediction"))\n\n\n@app.produces(topic="predictions")\nasync def to_predictions_1(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction to s1: {prediction}")\n return [prediction]\n\n\n@app.produces(topic="predictions", brokers=kafka_brokers_2)\nasync def to_predictions_2(prediction: TestMsg) -> TestMsg:\n print(f"Sending prediction to s2: {prediction}")\n return [prediction]\n')),(0,t.kt)("p",null,"Here\u2019s what you need to know about producing to multiple clusters:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We define two Kafka broker configurations: ",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," and\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_2"),", representing different clusters with their\nrespective connection details.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"We create an instance of the FastKafka application, specifying\n",(0,t.kt)("strong",{parentName:"p"},"kafka_brokers_1")," as the primary cluster for producing 
messages.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"The ",(0,t.kt)("inlineCode",{parentName:"p"},"on_preprocessed_signals"),' function serves as a consumer,\nhandling incoming messages from the \u201cpreprocessed_signals" topic.\nWithin this function, we invoke two producer functions:\n',(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1")," and ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2"),".")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"The ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_1"),' function sends predictions to the\n\u201cpredictions" topic on ',(0,t.kt)("em",{parentName:"p"},"kafka_brokers_1")," cluster.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Additionally, the ",(0,t.kt)("inlineCode",{parentName:"p"},"to_predictions_2"),' function sends the same\npredictions to the \u201cpredictions" topic on ',(0,t.kt)("em",{parentName:"p"},"kafka_brokers_2")," cluster.\nThis allows for producing the same data to multiple clusters\nsimultaneously."))),(0,t.kt)("p",null,"By utilizing this approach, you can seamlessly produce messages to\nmultiple Kafka clusters, enabling you to distribute data across\ndifferent environments or leverage the strengths of various clusters."),(0,t.kt)("p",null,"Feel free to customize the producer functions as per your requirements,\nperforming any necessary data transformations or enrichment before\nsending the predictions."),(0,t.kt)("p",null,"With FastKafka, producing to multiple clusters becomes a breeze,\nempowering you to harness the capabilities of multiple environments\neffortlessly."),(0,t.kt)("h4",{id:"testing-2"},"Testing"),(0,t.kt)("p",null,"Let\u2019s take a look at the testing code snippet:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nasync with Tester(app) as tester:\n await tester.to_preprocessed_signals(TestMsg(msg="signal"))\n await 
tester.mirrors[to_predictions_1].assert_called(timeout=5)\n await tester.mirrors[to_predictions_2].assert_called(timeout=5)\n')),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"23-06-23 12:16:49.903 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n23-06-23 12:16:49.904 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n23-06-23 12:16:49.904 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:49.905 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:49.905 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'\n23-06-23 12:16:49.906 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:49.921 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'\n23-06-23 12:16:49.921 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n23-06-23 12:16:49.921 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:49.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-06-23 12:16:49.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:49.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:49.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:49.924 [INFO] fastkafka._testing.in_memory_broker: 
AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']\n23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:49.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}\n23-06-23 12:16:49.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:49.926 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}\n23-06-23 12:16:49.928 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n23-06-23 12:16:49.929 
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\nmsg=TestMsg(msg='signal')\nSending prediction to s1: msg='prediction'\nSending prediction to s2: msg='prediction'\n23-06-23 12:16:53.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:53.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:53.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:53.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:53.924 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:53.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping\n")),(0,t.kt)("p",null,"Here\u2019s how you can perform the necessary tests:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Within an async with block, create an instance of the ",(0,t.kt)("strong",{parentName:"p"},"Tester")," by\npassing in your app object, representing your FastKafka 
application.")),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"Using the ",(0,t.kt)("inlineCode",{parentName:"p"},"tester.to_preprocessed_signals"),' method, you can send a\nTestMsg message with the content \u201csignal".')),(0,t.kt)("li",{parentName:"ol"},(0,t.kt)("p",{parentName:"li"},"After sending the message, you can perform assertions on the\nto_predictions_1 and to_predictions_2 functions using\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[to_predictions_1].assert_called(timeout=5)")," and\n",(0,t.kt)("inlineCode",{parentName:"p"},"tester.mirrors[to_predictions_2].assert_called(timeout=5)"),". These\nassertions ensure that the respective producer functions have\nproduced data to their respective topic/broker combinations."))),(0,t.kt)("p",null,"By employing this testing approach, you can verify that the producing\nfunctions correctly send messages to their respective clusters. The\ntesting framework provided by FastKafka enables you to ensure the\naccuracy and reliability of your application\u2019s producing logic."))}l.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d2282d9e.ad5da899.js b/assets/js/d2282d9e.ad5da899.js new file mode 100644 index 0000000..d4eb5d6 --- /dev/null +++ b/assets/js/d2282d9e.ad5da899.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4779],{3905:(e,t,n)=>{n.d(t,{Zo:()=>l,kt:()=>k});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function c(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),d=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):c(c({},t),e)),n},l=function(e){var t=d(e.components);return r.createElement(i.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,l=s(e,["components","mdxType","originalType","parentName"]),p=d(n),u=a,k=p["".concat(i,".").concat(u)]||p[u]||f[u]||o;return n?r.createElement(k,c(c({ref:t},l),{},{components:n})):r.createElement(k,c({ref:t},l))}));function k(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,c=new Array(o);c[0]=u;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[p]="string"==typeof e?e:a,c[1]=s;for(var d=2;d<o;d++)c[d]=n[d];return r.createElement.apply(null,c)}return r.createElement.apply(null,n)}u.displayName="MDXCreateElement"},9985:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>s,toc:()=>d});var r=n(7462),a=(n(7294),n(3905));const o={},c=void 
0,s={unversionedId:"api/fastkafka/encoder/json_decoder",id:"version-0.7.0/api/fastkafka/encoder/json_decoder",title:"json_decoder",description:"fastkafka.encoder.jsondecoder {fastkafka.encoder.jsondecoder}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/json_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_decoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/json_decoder",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avsc_to_pydantic",permalink:"/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic"},next:{title:"json_encoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/json_encoder"}},i={},d=[{value:"<code>fastkafka.encoder.json_decoder</code>",id:"fastkafka.encoder.json_decoder",level:2},{value:"<code>json_decoder</code>",id:"json_decoder",level:3}],l={toc:d},p="wrapper";function f(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},l,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.encoder.json_decoder"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.json_decoder")),(0,a.kt)("h3",{id:"json_decoder"},(0,a.kt)("inlineCode",{parentName:"h3"},"json_decoder")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def json_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any")),(0,a.kt)("p",null,"Decoder to decode json string in bytes to pydantic model instance"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"raw_msg"),": Bytes message received from Kafka topic"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"cls"),": Pydantic class; This pydantic class will be used to construct instance of same class")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"An instance of given pydantic 
class")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d241d4ef.a87ac3a3.js b/assets/js/d241d4ef.a87ac3a3.js new file mode 100644 index 0000000..17592d0 --- /dev/null +++ b/assets/js/d241d4ef.a87ac3a3.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5339],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function l(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var u=r.createContext({}),s=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(u.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var 
n=e.components,a=e.mdxType,o=e.originalType,u=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),p=s(n),k=a,m=p["".concat(u,".").concat(k)]||p[k]||f[k]||o;return n?r.createElement(m,i(i({ref:t},c),{},{components:n})):r.createElement(m,i({ref:t},c))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,i=new Array(o);i[0]=k;var l={};for(var u in t)hasOwnProperty.call(t,u)&&(l[u]=t[u]);l.originalType=e,l[p]="string"==typeof e?e:a,i[1]=l;for(var s=2;s<o;s++)i[s]=n[s];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}k.displayName="MDXCreateElement"},232:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>l,toc:()=>s});var r=n(7462),a=(n(7294),n(3905));const o={},i=void 0,l={unversionedId:"api/fastkafka/executors/SequentialExecutor",id:"version-0.6.0/api/fastkafka/executors/SequentialExecutor",title:"SequentialExecutor",description:"fastkafka.executors.SequentialExecutor {fastkafka.executors.SequentialExecutor}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/executors/SequentialExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/SequentialExecutor",permalink:"/docs/0.6.0/api/fastkafka/executors/SequentialExecutor",draft:!1,tags:[],version:"0.6.0",frontMatter:{}},u={},s=[{value:"<code>fastkafka.executors.SequentialExecutor</code>",id:"fastkafka.executors.SequentialExecutor",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>run</code>",id:"run",level:3}],c={toc:s},p="wrapper";function f(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.executors.SequentialExecutor"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.executors.SequentialExecutor")),(0,a.kt)("p",null,"A class that implements a sequential executor for processing consumer records."),(0,a.kt)("p",null,"The SequentialExecutor class extends the 
StreamExecutor class and provides functionality\nfor running processing tasks in sequence by awaiting their coroutines."),(0,a.kt)("h3",{id:"init"},(0,a.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000) -> None")),(0,a.kt)("p",null,"Create an instance of SequentialExecutor"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"throw_exceptions"),": Flag indicating whether exceptions should be thrown or logged.\nDefaults to False."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"max_buffer_size"),": Maximum buffer size for the memory object stream.\nDefaults to 100_000.")),(0,a.kt)("h3",{id:"run"},(0,a.kt)("inlineCode",{parentName:"h3"},"run")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None")),(0,a.kt)("p",null,"Runs the sequential executor."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"is_shutting_down_f"),": Function to check if the executor is shutting down."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"generator"),": Generator function for retrieving consumer records."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"processor"),": Processor function for processing consumer records.")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git 
a/assets/js/d2af0b95.61dc7ae0.js b/assets/js/d2af0b95.61dc7ae0.js new file mode 100644 index 0000000..3ef9f0d --- /dev/null +++ b/assets/js/d2af0b95.61dc7ae0.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1267],{3905:(e,a,n)=>{n.d(a,{Zo:()=>k,kt:()=>m});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a<arguments.length;a++){var n=null!=arguments[a]?arguments[a]:{};a%2?r(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function l(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var n,t,o={},r=Object.keys(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},k=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},f=t.forwardRef((function(e,a){var n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(n),f=o,m=c["".concat(i,".").concat(f)]||c[f]||u[f]||r;return 
n?t.createElement(m,s(s({ref:a},k),{},{components:n})):t.createElement(m,s({ref:a},k))}));function m(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var r=n.length,s=new Array(r);s[0]=f;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=n[p];return t.createElement.apply(null,s)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},1036:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var t=n(7462),o=(n(7294),n(3905));const r={},s="First Steps",l={unversionedId:"guides/Guide_02_First_Steps",id:"version-0.7.0/guides/Guide_02_First_Steps",title:"First Steps",description:"Creating a simple Kafka consumer app",source:"@site/versioned_docs/version-0.7.0/guides/Guide_02_First_Steps.md",sourceDirName:"guides",slug:"/guides/Guide_02_First_Steps",permalink:"/docs/0.7.0/guides/Guide_02_First_Steps",draft:!1,tags:[],version:"0.7.0",frontMatter:{}},i={},p=[{value:"Creating a simple Kafka consumer app",id:"creating-a-simple-kafka-consumer-app",level:2},{value:"Sending first message to your consumer",id:"sending-first-message-to-your-consumer",level:2},{value:"Creating a hello Kafka producer",id:"creating-a-hello-kafka-producer",level:2},{value:"Recap",id:"recap",level:2}],k={toc:p},c="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(c,(0,t.Z)({},k,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"first-steps"},"First Steps"),(0,o.kt)("h2",{id:"creating-a-simple-kafka-consumer-app"},"Creating a simple Kafka consumer app"),(0,o.kt)("p",null,"For our first demo we will create the simplest possible Kafka consumer\nand run it using \u2018fastkafka run\u2019 command."),(0,o.kt)("p",null,"The consumer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Connect to the Kafka Broker we setup in the Intro 
guide")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Listen to the hello topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Write any message received from the hello topic to stdout"))),(0,o.kt)("p",null,"To create the consumer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_consumer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n print(f"Got data, msg={msg.msg}", flush=True)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,'!!! warning "Remember to flush"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. 
To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n")),(0,o.kt)("p",null,"To run this consumer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
subscribed.\n[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. \nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n")),(0,o.kt)("p",null,"Now you can interact with your consumer, by sending the messages to the\nsubscribed \u2018hello\u2019 topic, don\u2019t worry, we will cover this in the next\nstep of this guide."),(0,o.kt)("h2",{id:"sending-first-message-to-your-consumer"},"Sending first message to your consumer"),(0,o.kt)("p",null,"After we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly."),(0,o.kt)("p",null,"If you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic."),(0,o.kt)("p",null,"First, connect to your running kafka broker by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"docker run -it kafka /bin/bash\n")),(0,o.kt)("p",null,"Then, when connected to the container, 
run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n")),(0,o.kt)("p",null,"This will open an interactive connection to the hello topic, now you can\nwrite your mesages to the topic and they will be consumed by our\nconsumer."),(0,o.kt)("p",null,"In the shell, type:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'{"msg":"hello"}\n')),(0,o.kt)("p",null,"and press enter. This will send a hello message to the topic which will\nbe read by our running consumer and outputed to stdout."),(0,o.kt)("p",null,"Check the output of your consumer (terminal where you ran the \u2018fastkafka\nrun\u2019 command) and confirm that your consumer has read the Kafka message.\nYou shoud see something like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"Got data, msg=hello\n")),(0,o.kt)("h2",{id:"creating-a-hello-kafka-producer"},"Creating a hello Kafka producer"),(0,o.kt)("p",null,"Consuming messages is only a part of this Library functionality, the\nother big part is producing the messages. 
So, let\u2019s create our first\nkafka producer which will send it\u2019s greetings to our consumer\nperiodically."),(0,o.kt)("p",null,"The producer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Connect to the Kafka Broker we setup in the Intro guide"),(0,o.kt)("li",{parentName:"ol"},"Connect to the hello topic"),(0,o.kt)("li",{parentName:"ol"},"Periodically send a message to the hello world topic")),(0,o.kt)("p",null,"To create the producer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_producer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n logger.info(f"Producing: {msg}")\n return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello(HelloKafkaMsg(msg="hello"))\n await asyncio.sleep(1)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. 
in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,"To run this producer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: 
Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n")),(0,o.kt)("p",null,"Now, while the producer is running, it will send a HelloKafkaMsg every\nsecond to the hello kafka topic. 
If your consumer is still running, you\nshould see the messages appear in its log."),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka consumer using FastKafka"),(0,o.kt)("li",{parentName:"ol"},"Sent a message to our consumer trough Kafka"),(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka producer using FastKafka")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d2b827bd.868ae315.js b/assets/js/d2b827bd.868ae315.js new file mode 100644 index 0000000..8db1b23 --- /dev/null +++ b/assets/js/d2b827bd.868ae315.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4874],{3905:(e,t,n)=>{n.d(t,{Zo:()=>h,kt:()=>m});var i=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function a(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?r(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,i,o=function(e,t){if(null==e)return{};var n,i,o={},r=Object.keys(e);for(i=0;i<r.length;i++)n=r[i],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i<r.length;i++)n=r[i],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=i.createContext({}),l=function(e){var t=i.useContext(c),n=t;return e&&(n="function"==typeof 
e?e(t):a(a({},t),e)),n},h=function(e){var t=l(e.components);return i.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},p=i.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,c=e.parentName,h=s(e,["components","mdxType","originalType","parentName"]),d=l(n),p=o,m=d["".concat(c,".").concat(p)]||d[p]||u[p]||r;return n?i.createElement(m,a(a({ref:t},h),{},{components:n})):i.createElement(m,a({ref:t},h))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,a=new Array(r);a[0]=p;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[d]="string"==typeof e?e:o,a[1]=s;for(var l=2;l<r;l++)a[l]=n[l];return i.createElement.apply(null,a)}return i.createElement.apply(null,n)}p.displayName="MDXCreateElement"},5407:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>a,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var i=n(7462),o=(n(7294),n(3905));const r={},a=void 0,s={unversionedId:"LICENSE",id:"version-0.7.0/LICENSE",title:"LICENSE",description:"Apache License",source:"@site/versioned_docs/version-0.7.0/LICENSE.md",sourceDirName:".",slug:"/LICENSE",permalink:"/docs/0.7.0/LICENSE",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"run_fastkafka_server_process",permalink:"/docs/0.7.0/cli/run_fastkafka_server_process"},next:{title:"Contributing to fastkafka",permalink:"/docs/0.7.0/CONTRIBUTING"}},c={},l=[],h={toc:l},d="wrapper";function u(e){let{components:t,...n}=e;return(0,o.kt)(d,(0,i.Z)({},h,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Apache License\nVersion 2.0, January 2004\n",(0,o.kt)("a",{parentName:"p",href:"http://www.apache.org/licenses/"},"http://www.apache.org/licenses/")),(0,o.kt)("p",null," TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND 
DISTRIBUTION"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Definitions."),(0,o.kt)("p",{parentName:"li"},'"License" shall mean the terms and conditions for use, reproduction,\nand distribution as defined by Sections 1 through 9 of this document.'),(0,o.kt)("p",{parentName:"li"},'"Licensor" shall mean the copyright owner or entity authorized by\nthe copyright owner that is granting the License.'),(0,o.kt)("p",{parentName:"li"},'"Legal Entity" shall mean the union of the acting entity and all\nother entities that control, are controlled by, or are under common\ncontrol with that entity. For the purposes of this definition,\n"control" means (i) the power, direct or indirect, to cause the\ndirection or management of such entity, whether by contract or\notherwise, or (ii) ownership of fifty percent (50%) or more of the\noutstanding shares, or (iii) beneficial ownership of such entity.'),(0,o.kt)("p",{parentName:"li"},'"You" (or "Your") shall mean an individual or Legal Entity\nexercising permissions granted by this License.'),(0,o.kt)("p",{parentName:"li"},'"Source" form shall mean the preferred form for making modifications,\nincluding but not limited to software source code, documentation\nsource, and configuration files.'),(0,o.kt)("p",{parentName:"li"},'"Object" form shall mean any form resulting from mechanical\ntransformation or translation of a Source form, including but\nnot limited to compiled object code, generated documentation,\nand conversions to other media types.'),(0,o.kt)("p",{parentName:"li"},'"Work" shall mean the work of authorship, whether in Source or\nObject form, made available under the License, as indicated by a\ncopyright notice that is included in or attached to the work\n(an example is provided in the Appendix below).'),(0,o.kt)("p",{parentName:"li"},'"Derivative Works" shall mean any work, whether in Source or Object\nform, that is based on (or derived from) the Work and for which the\neditorial 
revisions, annotations, elaborations, or other modifications\nrepresent, as a whole, an original work of authorship. For the purposes\nof this License, Derivative Works shall not include works that remain\nseparable from, or merely link (or bind by name) to the interfaces of,\nthe Work and Derivative Works thereof.'),(0,o.kt)("p",{parentName:"li"},'"Contribution" shall mean any work of authorship, including\nthe original version of the Work and any modifications or additions\nto that Work or Derivative Works thereof, that is intentionally\nsubmitted to Licensor for inclusion in the Work by the copyright owner\nor by an individual or Legal Entity authorized to submit on behalf of\nthe copyright owner. For the purposes of this definition, "submitted"\nmeans any form of electronic, verbal, or written communication sent\nto the Licensor or its representatives, including but not limited to\ncommunication on electronic mailing lists, source code control systems,\nand issue tracking systems that are managed by, or on behalf of, the\nLicensor for the purpose of discussing and improving the Work, but\nexcluding communication that is conspicuously marked or otherwise\ndesignated in writing by the copyright owner as "Not a Contribution."'),(0,o.kt)("p",{parentName:"li"},'"Contributor" shall mean Licensor and any individual or Legal Entity\non behalf of whom a Contribution has been received by Licensor and\nsubsequently incorporated within the Work.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Copyright License. 
Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\ncopyright license to reproduce, prepare Derivative Works of,\npublicly display, publicly perform, sublicense, and distribute the\nWork and such Derivative Works in Source or Object form.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Patent License. Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\n(except as stated in this section) patent license to make, have made,\nuse, offer to sell, sell, import, and otherwise transfer the Work,\nwhere such license applies only to those patent claims licensable\nby such Contributor that are necessarily infringed by their\nContribution(s) alone or by combination of their Contribution(s)\nwith the Work to which such Contribution(s) was submitted. If You\ninstitute patent litigation against any entity (including a\ncross-claim or counterclaim in a lawsuit) alleging that the Work\nor a Contribution incorporated within the Work constitutes direct\nor contributory patent infringement, then any patent licenses\ngranted to You under this License for that Work shall terminate\nas of the date such litigation is filed.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Redistribution. 
You may reproduce and distribute copies of the\nWork or Derivative Works thereof in any medium, with or without\nmodifications, and in Source or Object form, provided that You\nmeet the following conditions:"),(0,o.kt)("p",{parentName:"li"},"(a) You must give any other recipients of the Work or\nDerivative Works a copy of this License; and"),(0,o.kt)("p",{parentName:"li"},"(b) You must cause any modified files to carry prominent notices\nstating that You changed the files; and"),(0,o.kt)("p",{parentName:"li"},"(c) You must retain, in the Source form of any Derivative Works\nthat You distribute, all copyright, patent, trademark, and\nattribution notices from the Source form of the Work,\nexcluding those notices that do not pertain to any part of\nthe Derivative Works; and"),(0,o.kt)("p",{parentName:"li"},'(d) If the Work includes a "NOTICE" text file as part of its\ndistribution, then any Derivative Works that You distribute must\ninclude a readable copy of the attribution notices contained\nwithin such NOTICE file, excluding those notices that do not\npertain to any part of the Derivative Works, in at least one\nof the following places: within a NOTICE text file distributed\nas part of the Derivative Works; within the Source form or\ndocumentation, if provided along with the Derivative Works; or,\nwithin a display generated by the Derivative Works, if and\nwherever such third-party notices normally appear. The contents\nof the NOTICE file are for informational purposes only and\ndo not modify the License. 
You may add Your own attribution\nnotices within Derivative Works that You distribute, alongside\nor as an addendum to the NOTICE text from the Work, provided\nthat such additional attribution notices cannot be construed\nas modifying the License.'),(0,o.kt)("p",{parentName:"li"},"You may add Your own copyright statement to Your modifications and\nmay provide additional or different license terms and conditions\nfor use, reproduction, or distribution of Your modifications, or\nfor any such Derivative Works as a whole, provided Your use,\nreproduction, and distribution of the Work otherwise complies with\nthe conditions stated in this License.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Submission of Contributions. Unless You explicitly state otherwise,\nany Contribution intentionally submitted for inclusion in the Work\nby You to the Licensor shall be under the terms and conditions of\nthis License, without any additional terms or conditions.\nNotwithstanding the above, nothing herein shall supersede or modify\nthe terms of any separate license agreement you may have executed\nwith Licensor regarding such Contributions.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Trademarks. This License does not grant permission to use the trade\nnames, trademarks, service marks, or product names of the Licensor,\nexcept as required for reasonable and customary use in describing the\norigin of the Work and reproducing the content of the NOTICE file.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},'Disclaimer of Warranty. Unless required by applicable law or\nagreed to in writing, Licensor provides the Work (and each\nContributor provides its Contributions) on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\nimplied, including, without limitation, any warranties or conditions\nof TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\nPARTICULAR PURPOSE. 
You are solely responsible for determining the\nappropriateness of using or redistributing the Work and assume any\nrisks associated with Your exercise of permissions under this License.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Limitation of Liability. In no event and under no legal theory,\nwhether in tort (including negligence), contract, or otherwise,\nunless required by applicable law (such as deliberate and grossly\nnegligent acts) or agreed to in writing, shall any Contributor be\nliable to You for damages, including any direct, indirect, special,\nincidental, or consequential damages of any character arising as a\nresult of this License or out of the use or inability to use the\nWork (including but not limited to damages for loss of goodwill,\nwork stoppage, computer failure or malfunction, or any and all\nother commercial damages or losses), even if such Contributor\nhas been advised of the possibility of such damages.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Accepting Warranty or Additional Liability. While redistributing\nthe Work or Derivative Works thereof, You may choose to offer,\nand charge a fee for, acceptance of support, warranty, indemnity,\nor other liability obligations and/or rights consistent with this\nLicense. 
However, in accepting such obligations, You may act only\non Your own behalf and on Your sole responsibility, not on behalf\nof any other Contributor, and only if You agree to indemnify,\ndefend, and hold each Contributor harmless for any liability\nincurred by, or claims asserted against, such Contributor by reason\nof your accepting any such warranty or additional liability."),(0,o.kt)("p",{parentName:"li"},"END OF TERMS AND CONDITIONS"),(0,o.kt)("p",{parentName:"li"},"APPENDIX: How to apply the Apache License to your work."),(0,o.kt)("p",{parentName:"li"},' To apply the Apache License to your work, attach the following\nboilerplate notice, with the fields enclosed by brackets "[]"\nreplaced with your own identifying information. (Don\'t include\nthe brackets!) The text should be enclosed in the appropriate\ncomment syntax for the file format. We also recommend that a\nfile or class name and description of purpose be included on the\nsame "printed page" as the copyright notice for easier\nidentification within third-party archives.'),(0,o.kt)("p",{parentName:"li"},"Copyright ","[yyyy][name of copyright owner]"),(0,o.kt)("p",{parentName:"li"},'Licensed under the Apache License, Version 2.0 (the "License");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at'),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre"},"http://www.apache.org/licenses/LICENSE-2.0\n")),(0,o.kt)("p",{parentName:"li"},'Unless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.'))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d35204c3.f3736ac4.js b/assets/js/d35204c3.f3736ac4.js new file mode 100644 index 0000000..268726f --- /dev/null +++ 
b/assets/js/d35204c3.f3736ac4.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[604],{3905:(e,t,n)=>{n.d(t,{Zo:()=>l,kt:()=>m});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?a(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function u(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)n=a[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),s=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},l=function(e){var t=s(e.components);return r.createElement(c.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,l=u(e,["components","mdxType","originalType","parentName"]),p=s(n),d=o,m=p["".concat(c,".").concat(d)]||p[d]||f[d]||a;return n?r.createElement(m,i(i({ref:t},l),{},{components:n})):r.createElement(m,i({ref:t},l))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new 
Array(a);i[0]=d;var u={};for(var c in t)hasOwnProperty.call(t,c)&&(u[c]=t[c]);u.originalType=e,u[p]="string"==typeof e?e:o,i[1]=u;for(var s=2;s<a;s++)i[s]=n[s];return r.createElement.apply(null,i)}return r.createElement.apply(null,n)}d.displayName="MDXCreateElement"},2560:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>f,frontMatter:()=>a,metadata:()=>u,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={},i="Authentication",u={unversionedId:"guides/Guide_03_Authentication",id:"version-0.7.1/guides/Guide_03_Authentication",title:"Authentication",description:"TLS Authentication",source:"@site/versioned_docs/version-0.7.1/guides/Guide_03_Authentication.md",sourceDirName:"guides",slug:"/guides/Guide_03_Authentication",permalink:"/docs/0.7.1/guides/Guide_03_Authentication",draft:!1,tags:[],version:"0.7.1",frontMatter:{}},c={},s=[{value:"TLS Authentication",id:"tls-authentication",level:2}],l={toc:s},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},l,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"authentication"},"Authentication"),(0,o.kt)("h2",{id:"tls-authentication"},"TLS Authentication"),(0,o.kt)("p",null,"sasl_mechanism (str) \u2013 Authentication mechanism when security_protocol\nis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN,\nGSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN"),(0,o.kt)("p",null,"sasl_plain_username (str) \u2013 username for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_plain_password (str) \u2013 password for SASL PLAIN authentication.\nDefault: None"),(0,o.kt)("p",null,"sasl_oauth_token_provider (AbstractTokenProvider) \u2013 OAuthBearer token\nprovider instance. (See kafka.oauth.abstract). 
Default: None"))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d40fb48f.0dd8a669.js b/assets/js/d40fb48f.0dd8a669.js new file mode 100644 index 0000000..19cb51c --- /dev/null +++ b/assets/js/d40fb48f.0dd8a669.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2049],{3905:(e,n,a)=>{a.d(n,{Zo:()=>l,kt:()=>f});var t=a(7294);function i(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function s(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function o(e){for(var n=1;n<arguments.length;n++){var a=null!=arguments[n]?arguments[n]:{};n%2?s(Object(a),!0).forEach((function(n){i(e,n,a[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):s(Object(a)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(a,n))}))}return e}function r(e,n){if(null==e)return{};var a,t,i=function(e,n){if(null==e)return{};var a,t,i={},s=Object.keys(e);for(t=0;t<s.length;t++)a=s[t],n.indexOf(a)>=0||(i[a]=e[a]);return i}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(t=0;t<s.length;t++)a=s[t],n.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(i[a]=e[a])}return i}var d=t.createContext({}),c=function(e){var n=t.useContext(d),a=n;return e&&(a="function"==typeof e?e(n):o(o({},n),e)),a},l=function(e){var n=c(e.components);return t.createElement(d.Provider,{value:n},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var 
a=e.components,i=e.mdxType,s=e.originalType,d=e.parentName,l=r(e,["components","mdxType","originalType","parentName"]),p=c(a),u=i,f=p["".concat(d,".").concat(u)]||p[u]||m[u]||s;return a?t.createElement(f,o(o({ref:n},l),{},{components:a})):t.createElement(f,o({ref:n},l))}));function f(e,n){var a=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var s=a.length,o=new Array(s);o[0]=u;var r={};for(var d in n)hasOwnProperty.call(n,d)&&(r[d]=n[d]);r.originalType=e,r[p]="string"==typeof e?e:i,o[1]=r;for(var c=2;c<s;c++)o[c]=a[c];return t.createElement.apply(null,o)}return t.createElement.apply(null,a)}u.displayName="MDXCreateElement"},1885:(e,n,a)=>{a.r(n),a.d(n,{assets:()=>d,contentTitle:()=>o,default:()=>m,frontMatter:()=>s,metadata:()=>r,toc:()=>c});var t=a(7462),i=(a(7294),a(3905));const s={},o="Encoding and Decoding Kafka Messages with FastKafka",r={unversionedId:"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",id:"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",title:"Encoding and Decoding Kafka Messages with FastKafka",description:"Prerequisites",source:"@site/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",permalink:"/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Lifespan Events",permalink:"/docs/next/guides/Guide_05_Lifespan_Handler"},next:{title:"Using multiple Kafka clusters",permalink:"/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters"}},d={},c=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Ways to Encode and Decode Messages with FastKafka",id:"ways-to-encode-and-decode-messages-with-fastkafka",level:2},{value:"1. Json encoder and decoder",id:"1-json-encoder-and-decoder",level:2},{value:"2. 
Avro encoder and decoder",id:"2-avro-encoder-and-decoder",level:2},{value:"What is Avro?",id:"what-is-avro",level:3},{value:"Installing FastKafka with Avro dependencies",id:"installing-fastkafka-with-avro-dependencies",level:3},{value:"Defining Avro Schema Using Pydantic Models",id:"defining-avro-schema-using-pydantic-models",level:3},{value:"Reusing existing avro schema",id:"reusing-existing-avro-schema",level:3},{value:"Building pydantic models from avro schema dictionary",id:"building-pydantic-models-from-avro-schema-dictionary",level:4},{value:"Building pydantic models from <code>.avsc</code> file",id:"building-pydantic-models-from-avsc-file",level:4},{value:"Consume/Produce avro messages with FastKafka",id:"consumeproduce-avro-messages-with-fastkafka",level:3},{value:"Assembling it all together",id:"assembling-it-all-together",level:3},{value:"3. Custom encoder and decoder",id:"3-custom-encoder-and-decoder",level:2},{value:"Writing a custom encoder and decoder",id:"writing-a-custom-encoder-and-decoder",level:3},{value:"Assembling it all together",id:"assembling-it-all-together-1",level:3}],l={toc:c},p="wrapper";function m(e){let{components:n,...a}=e;return(0,i.kt)(p,(0,t.Z)({},l,a,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"encoding-and-decoding-kafka-messages-with-fastkafka"},"Encoding and Decoding Kafka Messages with FastKafka"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A basic knowledge of\n",(0,i.kt)("a",{parentName:"li",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nis needed to proceed with this guide. 
If you are not familiar with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),",\nplease go through the ",(0,i.kt)("a",{parentName:"li",href:"/docs#tutorial"},"tutorial")," first."),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("a",{parentName:"li",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith its dependencies installed is needed. Please install\n",(0,i.kt)("a",{parentName:"li",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nusing the command - ",(0,i.kt)("inlineCode",{parentName:"li"},"pip install fastkafka"))),(0,i.kt)("h2",{id:"ways-to-encode-and-decode-messages-with-fastkafka"},"Ways to Encode and Decode Messages with FastKafka"),(0,i.kt)("p",null,"In python, by default, we send Kafka messages as bytes. Even if our\nmessage is a string, we convert it to bytes and then send it to Kafka\ntopic. 
imilarly, while consuming messages, we consume them as bytes and\nthen convert them to strings."),(0,i.kt)("p",null,"In FastKafka, we specify message schema using Pydantic models as\nmentioned in ",(0,i.kt)("a",{parentName:"p",href:"/docs#messages"},"tutorial"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Kafka messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("p",null,"Then, we send and receive messages as instances of Pydantic models which\nwe defined. So, FastKafka needs a way to encode/decode to these Pydantic\nmodel messages to bytes in order to send/receive messages to/from Kafka\ntopics."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods of FastKafka accept a parameter\ncalled ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode Kafka messages. FastKafka\nprovides three ways to encode and decode messages:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"json - This is the default encoder/decoder option in FastKafka.\nWhile producing, this option converts our instance of Pydantic model\nmessages to a JSON string and then converts it to bytes before\nsending it to the topic. 
While consuming, it converts bytes to a\nJSON string and then constructs an instance of Pydantic model from\nthe JSON string."),(0,i.kt)("li",{parentName:"ol"},"avro - This option uses Avro encoding/decoding to convert instances\nof Pydantic model messages to bytes while producing, and while\nconsuming, it constructs an instance of Pydantic model from bytes."),(0,i.kt)("li",{parentName:"ol"},"custom encoder/decoder - If you are not happy with the json or avro\nencoder/decoder options, you can write your own encoder/decoder\nfunctions and use them to encode/decode Pydantic messages.")),(0,i.kt)("h2",{id:"1-json-encoder-and-decoder"},"1. Json encoder and decoder"),(0,i.kt)("p",null,"The default option in FastKafka is json encoder/decoder. This option,\nwhile producing, converts our instance of pydantic model messages to\njson string and then converts to bytes before sending it to the topics.\nWhile consuming it converts bytes to json string and then constructs\ninstance of pydantic model from json string."),(0,i.kt)("p",null,"We can use the application from ",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," as\nis, and it will use the json encoder/decoder by default. 
But, for\nclarity, let\u2019s modify it to explicitly accept the \u2018json\u2019 encoder/decoder\nparameter:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="json")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, 
msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="json")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"In the above code, the ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),' decorator sets up a\nconsumer for the \u201cinput_data" topic, using the \u2018json\u2019 decoder to convert\nthe message payload to an instance of ',(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData"),". The\n",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' decorator sets up a producer for the \u201cpredictions"\ntopic, using the \u2018json\u2019 encoder to convert the instance of\n',(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," to message payload."),(0,i.kt)("h2",{id:"2-avro-encoder-and-decoder"},"2. Avro encoder and decoder"),(0,i.kt)("h3",{id:"what-is-avro"},"What is Avro?"),(0,i.kt)("p",null,"Avro is a row-oriented remote procedure call and data serialization\nframework developed within Apache\u2019s Hadoop project. It uses JSON for\ndefining data types and protocols, and serializes data in a compact\nbinary format. To learn more about the Apache Avro, please check out the\n",(0,i.kt)("a",{parentName:"p",href:"https://avro.apache.org/docs/"},"docs"),"."),(0,i.kt)("h3",{id:"installing-fastkafka-with-avro-dependencies"},"Installing FastKafka with Avro dependencies"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith dependencies for Apache Avro installed is needed to use avro\nencoder/decoder. 
Please install\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nwith Avro support using the command - ",(0,i.kt)("inlineCode",{parentName:"p"},"pip install fastkafka[avro]")),(0,i.kt)("h3",{id:"defining-avro-schema-using-pydantic-models"},"Defining Avro Schema Using Pydantic Models"),(0,i.kt)("p",null,"By default, you can use Pydantic model to define your message schemas.\nFastKafka internally takes care of encoding and decoding avro messages,\nbased on the Pydantic models."),(0,i.kt)("p",null,"So, similar to the ",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),", the message schema will\nremain as it is."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# Define Pydantic models for Avro messages\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,i.kt)("p",null,"No need to change anything to support avro. 
You can use existing\nPydantic models as is."),(0,i.kt)("h3",{id:"reusing-existing-avro-schema"},"Reusing existing avro schema"),(0,i.kt)("p",null,"If you are using some other library to send and receive avro encoded\nmessages, it is highly likely that you already have an Avro schema\ndefined."),(0,i.kt)("h4",{id:"building-pydantic-models-from-avro-schema-dictionary"},"Building pydantic models from avro schema dictionary"),(0,i.kt)("p",null,"Let\u2019s modify the above example and let\u2019s assume we have schemas already\nfor ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisInputData")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," which will look like below:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'iris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n')),(0,i.kt)("p",null,"We can easily construct pydantic models from avro schema using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/encoder/avsc_to_pydantic#fastkafka.encoder.avsc_to_pydantic"},(0,i.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction which is included as part of\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nitself."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.encoder import 
avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.model_fields)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.model_fields)\n")),(0,i.kt)("p",null,"The above code will convert avro schema to pydantic models and will\nprint pydantic models\u2019 fields. The output of the above is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"{'sepal_length': ModelField(name='sepal_length', type=float, required=True),\n 'sepal_width': ModelField(name='sepal_width', type=float, required=True),\n 'petal_length': ModelField(name='petal_length', type=float, required=True),\n 'petal_width': ModelField(name='petal_width', type=float, required=True)}\n \n {'species': ModelField(name='species', type=str, required=True)}\n")),(0,i.kt)("p",null,"This is exactly same as manually defining the pydantic models ourselves.\nYou don\u2019t have to worry about not making any mistakes while converting\navro schema to pydantic models manually. You can easily and\nautomatically accomplish it by using\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/encoder/avsc_to_pydantic#fastkafka.encoder.avsc_to_pydantic"},(0,i.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction as demonstrated above."),(0,i.kt)("h4",{id:"building-pydantic-models-from-avsc-file"},"Building pydantic models from ",(0,i.kt)("inlineCode",{parentName:"h4"},".avsc")," file"),(0,i.kt)("p",null,"Not all cases will have avro schema conveniently defined as a python\ndictionary. You may have it stored as the proprietary ",(0,i.kt)("inlineCode",{parentName:"p"},".avsc")," files in\nfilesystem. 
Let\u2019s see how to convert those ",(0,i.kt)("inlineCode",{parentName:"p"},".avsc")," files to pydantic\nmodels."),(0,i.kt)("p",null,"Let\u2019s assume our avro files are stored in files called\n",(0,i.kt)("inlineCode",{parentName:"p"},"iris_input_data_schema.avsc")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"iris_prediction_schema.avsc"),". In that\ncase, following code converts the schema to pydantic models:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import json\nfrom fastkafka.encoder import avsc_to_pydantic\n\n\nwith open("iris_input_data_schema.avsc", "rb") as f:\n iris_input_data_schema = json.load(f)\n \nwith open("iris_prediction_schema.avsc", "rb") as f:\n iris_prediction_schema = json.load(f)\n \n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nprint(IrisInputData.model_fields)\n\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\nprint(IrisPrediction.model_fields)\n')),(0,i.kt)("h3",{id:"consumeproduce-avro-messages-with-fastkafka"},"Consume/Produce avro messages with FastKafka"),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nprovides ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods to consume/produces\nmessages to/from a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," topic. 
This is explained in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#function-decorators"},"tutorial"),"."),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods accepts a parameter called\n",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," to decode/encode avro messages."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", encoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", decoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"In the above example, in ",(0,i.kt)("inlineCode",{parentName:"p"},"@consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@produces")," methods, we\nexplicitly instruct FastKafka to ",(0,i.kt)("inlineCode",{parentName:"p"},"decode")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"encode")," messages using\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"avro")," ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," instead of the default ",(0,i.kt)("inlineCode",{parentName:"p"},"json"),"\n",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"/",(0,i.kt)("inlineCode",{parentName:"p"},"encoder"),"."),(0,i.kt)("h3",{id:"assembling-it-all-together"},"Assembling it all together"),(0,i.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use 
",(0,i.kt)("inlineCode",{parentName:"p"},"avro")," to ",(0,i.kt)("inlineCode",{parentName:"p"},"decode")," and\n",(0,i.kt)("inlineCode",{parentName:"p"},"encode")," messages:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\niris_input_data_schema = {\n "type": "record",\n "namespace": "IrisInputData",\n "name": "IrisInputData",\n "fields": [\n {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},\n {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},\n {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},\n {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},\n ],\n}\niris_prediction_schema = {\n "type": "record",\n "namespace": "IrisPrediction",\n "name": "IrisPrediction",\n "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],\n}\n# Or load schema from avsc files\n\nfrom fastkafka.encoder import avsc_to_pydantic\n\nIrisInputData = avsc_to_pydantic(iris_input_data_schema)\nIrisPrediction = avsc_to_pydantic(iris_prediction_schema)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n 
title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder="avro")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder="avro")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"The above code is a sample implementation of using FastKafka to consume\nand produce Avro-encoded messages from/to a Kafka topic. The code\ndefines two Avro schemas for the input data and the prediction result.\nIt then uses the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/encoder/avsc_to_pydantic#fastkafka.encoder.avsc_to_pydantic"},(0,i.kt)("inlineCode",{parentName:"a"},"avsc_to_pydantic")),"\nfunction from the FastKafka library to convert the Avro schema into\nPydantic models, which will be used to decode and encode Avro messages."),(0,i.kt)("p",null,"The\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is then instantiated with the broker details, and two functions\ndecorated with ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces"),' are\ndefined to consume messages from the \u201cinput_data" topic and produce\nmessages to the \u201cpredictions" topic, respectively. 
The functions uses\nthe decoder=\u201cavro" and encoder=\u201cavro" parameters to decode and encode\nthe Avro messages.'),(0,i.kt)("p",null,"In summary, the above code demonstrates a straightforward way to use\nAvro-encoded messages with FastKafka to build a message processing\npipeline."),(0,i.kt)("h2",{id:"3-custom-encoder-and-decoder"},"3. Custom encoder and decoder"),(0,i.kt)("p",null,"If you are not happy with the json or avro encoder/decoder options, you\ncan write your own encoder/decoder functions and use them to\nencode/decode Pydantic messages."),(0,i.kt)("h3",{id:"writing-a-custom-encoder-and-decoder"},"Writing a custom encoder and decoder"),(0,i.kt)("p",null,"In this section, let\u2019s see how to write a custom encoder and decoder\nwhich obfuscates kafka message with simple\n",(0,i.kt)("a",{parentName:"p",href:"https://en.wikipedia.org/wiki/ROT13"},"ROT13")," cipher."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"import codecs\nimport json\nfrom typing import Any, Type\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, 'rot13')\n raw_bytes = obfuscated.encode(\"utf-8\")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\n obfuscated = raw_msg.decode(\"utf-8\")\n msg_str = codecs.decode(obfuscated, 'rot13')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n")),(0,i.kt)("p",null,"The above code defines two custom functions for encoding and decoding\nmessages in a Kafka application using the FastKafka library."),(0,i.kt)("p",null,"The encoding function, ",(0,i.kt)("inlineCode",{parentName:"p"},"custom_encoder()"),", takes a message ",(0,i.kt)("inlineCode",{parentName:"p"},"msg")," which\nis an instance of a Pydantic model, converts it to a JSON string using\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"json()")," method, obfuscates the resulting string using the ROT13\nalgorithm from the 
",(0,i.kt)("inlineCode",{parentName:"p"},"codecs")," module, and finally encodes the obfuscated\nstring as raw bytes using the UTF-8 encoding."),(0,i.kt)("p",null,"The decoding function, ",(0,i.kt)("inlineCode",{parentName:"p"},"custom_decoder()"),", takes a raw message ",(0,i.kt)("inlineCode",{parentName:"p"},"raw_msg"),"\nin bytes format, a Pydantic class to construct instance with cls\nparameter. It first decodes the raw message from UTF-8 encoding, then\nuses the ROT13 algorithm to de-obfuscate the string. Finally, it loads\nthe resulting JSON string using the ",(0,i.kt)("inlineCode",{parentName:"p"},"json.loads()")," method and returns a\nnew instance of the specified ",(0,i.kt)("inlineCode",{parentName:"p"},"cls")," class initialized with the decoded\ndictionary."),(0,i.kt)("p",null,"These functions can be used with FastKafka\u2019s ",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder"),"\nparameters to customize the serialization and deserialization of\nmessages in Kafka topics."),(0,i.kt)("p",null,"Let\u2019s test the above code"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},"i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n\nencoded = custom_encoder(i)\ndisplay(encoded)\n\ndecoded = custom_decoder(encoded, IrisInputData)\ndisplay(decoded)\n")),(0,i.kt)("p",null,"This will result in following output"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},'b\'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}\'\n\nIrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)\n')),(0,i.kt)("h3",{id:"assembling-it-all-together-1"},"Assembling it all together"),(0,i.kt)("p",null,"Let\u2019s rewrite the sample code found in\n",(0,i.kt)("a",{parentName:"p",href:"/docs#running-the-service"},"tutorial")," to use our custom decoder and\nencoder 
functions:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\n\nimport codecs\nimport json\nfrom typing import Any, Type\n\n\ndef custom_encoder(msg: BaseModel) -> bytes:\n msg_str = msg.json()\n obfuscated = codecs.encode(msg_str, \'rot13\')\n raw_bytes = obfuscated.encode("utf-8")\n return raw_bytes\n\ndef custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:\n obfuscated = raw_msg.decode("utf-8")\n msg_str = codecs.decode(obfuscated, \'rot13\')\n msg_dict = json.loads(msg_str)\n return cls(**msg_dict)\n\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n 
"security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", decoder=custom_decoder)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions", encoder=custom_encoder)\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"This code defines a custom encoder and decoder functions for encoding\nand decoding messages sent through a Kafka messaging system."),(0,i.kt)("p",null,"The custom ",(0,i.kt)("inlineCode",{parentName:"p"},"encoder")," function takes a message represented as a\n",(0,i.kt)("inlineCode",{parentName:"p"},"BaseModel")," and encodes it as bytes by first converting it to a JSON\nstring and then obfuscating it using the ROT13 encoding. The obfuscated\nmessage is then converted to bytes using UTF-8 encoding and returned."),(0,i.kt)("p",null,"The custom ",(0,i.kt)("inlineCode",{parentName:"p"},"decoder")," function takes in the bytes representing an\nobfuscated message, decodes it using UTF-8 encoding, then decodes the\nROT13 obfuscation, and finally loads it as a dictionary using the ",(0,i.kt)("inlineCode",{parentName:"p"},"json"),"\nmodule. 
This dictionary is then converted to a ",(0,i.kt)("inlineCode",{parentName:"p"},"BaseModel")," instance\nusing the cls parameter."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d67a4111.aa568655.js b/assets/js/d67a4111.aa568655.js new file mode 100644 index 0000000..9f09fed --- /dev/null +++ b/assets/js/d67a4111.aa568655.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8308],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>f});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function i(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function r(e){for(var a=1;a<arguments.length;a++){var n=null!=arguments[a]?arguments[a]:{};a%2?i(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):i(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function s(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var n,t,o={},i=Object.keys(e);for(t=0;t<i.length;t++)n=i[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t<i.length;t++)n=i[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=t.createContext({}),l=function(e){var a=t.useContext(p),n=a;return e&&(n="function"==typeof e?e(a):r(r({},a),e)),n},c=function(e){var a=l(e.components);return t.createElement(p.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},m=t.forwardRef((function(e,a){var 
n=e.components,o=e.mdxType,i=e.originalType,p=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=l(n),m=o,f=d["".concat(p,".").concat(m)]||d[m]||u[m]||i;return n?t.createElement(f,r(r({ref:a},c),{},{components:n})):t.createElement(f,r({ref:a},c))}));function f(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var i=n.length,r=new Array(i);r[0]=m;var s={};for(var p in a)hasOwnProperty.call(a,p)&&(s[p]=a[p]);s.originalType=e,s[d]="string"==typeof e?e:o,r[1]=s;for(var l=2;l<i;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}m.displayName="MDXCreateElement"},279:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>u,frontMatter:()=>i,metadata:()=>s,toc:()=>l});var t=n(7462),o=(n(7294),n(3905));const i={},r="Lifespan Events",s={unversionedId:"guides/Guide_05_Lifespan_Handler",id:"version-0.7.0/guides/Guide_05_Lifespan_Handler",title:"Lifespan Events",description:"Did you know that you can define some special code that runs before and",source:"@site/versioned_docs/version-0.7.0/guides/Guide_05_Lifespan_Handler.md",sourceDirName:"guides",slug:"/guides/Guide_05_Lifespan_Handler",permalink:"/docs/0.7.0/guides/Guide_05_Lifespan_Handler",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Batch producing",permalink:"/docs/0.7.0/guides/Guide_23_Batch_Producing"},next:{title:"Encoding and Decoding Kafka Messages with FastKafka",permalink:"/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"}},p={},l=[{value:"Lifespan example - Iris prediction model",id:"lifespan-example---iris-prediction-model",level:2},{value:"Lifespan",id:"lifespan",level:3},{value:"Async context manager",id:"async-context-manager",level:3},{value:"App demo",id:"app-demo",level:2},{value:"FastKafka app",id:"fastkafka-app",level:3},{value:"Data modeling",id:"data-modeling",level:3},{value:"Consumers and 
producers",id:"consumers-and-producers",level:3},{value:"Final app",id:"final-app",level:3},{value:"Running the app",id:"running-the-app",level:3},{value:"Recap",id:"recap",level:2}],c={toc:l},d="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(d,(0,t.Z)({},c,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"lifespan-events"},"Lifespan Events"),(0,o.kt)("p",null,"Did you know that you can define some special code that runs before and\nafter your Kafka application? This code will be executed just once, but\nit covers the whole lifespan of your app! \ud83d\ude80"),(0,o.kt)("p",null,"Lets break it down:"),(0,o.kt)("p",null,"You can define logic (code) that should be executed before the\napplication starts up. This is like a warm-up for your app, getting it\nready to consume and produce messages."),(0,o.kt)("p",null,"Similarly, you can define logic (code) that should be executed when the\napplication is shutting down. This is like a cool-down for your app,\nmaking sure everything is properly closed and cleaned up."),(0,o.kt)("p",null,"By executing code before consuming and after producing, you cover the\nentire lifecycle of your application \ud83c\udf89"),(0,o.kt)("p",null,"This is super handy for setting up shared resources that are needed\nacross consumers and producers, like a database connection pool or a\nmachine learning model. And the best part? You can clean up these\nresources when the app is shutting down!"),(0,o.kt)("p",null,"So lets give it a try and see how it can make your Kafka app even more\nawesome! \ud83d\udcaa"),(0,o.kt)("h2",{id:"lifespan-example---iris-prediction-model"},"Lifespan example - Iris prediction model"),(0,o.kt)("p",null,"Let\u2019s dive into an example to see how you can leverage the lifecycle\nhandler to solve a common use case. Imagine that you have some machine\nlearning models that need to consume incoming messages and produce\nresponse/prediction messages. 
These models are shared among consumers\nand producers, which means you don\u2019t want to load them for every\nmessage."),(0,o.kt)("p",null,"Here\u2019s where the lifecycle handler comes to the rescue! By loading the\nmodel before the messages are consumed and produced, but only right\nbefore the application starts receiving messages, you can ensure that\nthe model is ready to use without compromising the performance of your\ntests. In the upcoming sections, we\u2019ll walk you through how to\ninitialize an Iris species prediction model and use it in your developed\napplication."),(0,o.kt)("h3",{id:"lifespan"},"Lifespan"),(0,o.kt)("p",null,"You can define this startup and shutdown logic using the lifespan\nparameter of the FastKafka app, and an async context manager."),(0,o.kt)("p",null,"Let\u2019s start with an example and then see it in detail."),(0,o.kt)("p",null,"We create an async function lifespan() with yield like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \n')),(0,o.kt)("p",null,"The first thing to notice, is that we are defining an async function\nwith ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),". 
This is very similar to Dependencies with ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,o.kt)("p",null,"The first part of the function, before the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),", will be executed\n",(0,o.kt)("strong",{parentName:"p"},"before")," the application starts. And the part after the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield")," will\nbe executed ",(0,o.kt)("strong",{parentName:"p"},"after")," the application has finished."),(0,o.kt)("p",null,"This lifespan will create an iris_prediction model on application\nstartup and cleanup the references after the app is shutdown."),(0,o.kt)("p",null,"The lifespan will be passed an KafkaApp reference on startup of your\napplication, which you can use to reference your application on startup."),(0,o.kt)("p",null,"For demonstration sake, we also added prints so that when running the\napp we can see that our lifespan was called."),(0,o.kt)("h3",{id:"async-context-manager"},"Async context manager"),(0,o.kt)("p",null,"Context managers can be used in ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," blocks, our lifespan, for example\ncould be used like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"ml_models = {}\nasync with lifespan(None):\n print(ml_models)\n")),(0,o.kt)("p",null,"When you create a context manager or an async context manager, what it\ndoes is that, before entering the ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute the code\nbefore the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),", and after exiting the ",(0,o.kt)("inlineCode",{parentName:"p"},"with")," block, it will execute\nthe code after the ",(0,o.kt)("inlineCode",{parentName:"p"},"yield"),"."),(0,o.kt)("p",null,"If you want to learn more about context managers and contextlib\ndecorators, please visit ",(0,o.kt)("a",{parentName:"p",href:"https://docs.python.org/3/library/contextlib.html"},"Python 
official\ndocs")),(0,o.kt)("h2",{id:"app-demo"},"App demo"),(0,o.kt)("h3",{id:"fastkafka-app"},"FastKafka app"),(0,o.kt)("p",null,"Lets now create our application using the created lifespan handler."),(0,o.kt)("p",null,"Notice how we passed our lifespan handler to the app when constructing\nit trough the ",(0,o.kt)("inlineCode",{parentName:"p"},"lifespan")," argument."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local development kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n')),(0,o.kt)("h3",{id:"data-modeling"},"Data modeling"),(0,o.kt)("p",null,"Lets model the Iris data for our app:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"consumers-and-producers"},"Consumers and producers"),(0,o.kt)("p",null,"Lets create a consumer and producer for our app that will generate\npredictions from input iris data."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n 
species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h3",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"The final app looks like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom contextlib import asynccontextmanager\n\nfrom pydantic import BaseModel, Field, NonNegativeFloat\n\nfrom fastkafka import FastKafka\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\nml_models = {}\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n print("Loading the model!")\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)\n yield\n # Clean up the ML models and release the resources\n \n print("Exiting, clearing model dict!")\n ml_models.clear()\n \nkafka_brokers = {\n "localhost": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local development kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n },\n}\n\nkafka_app = FastKafka(\n title="Iris 
predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h3",{id:"running-the-app"},"Running the app"),(0,o.kt)("p",null,"Now we can run the app with your custom lifespan handler. Copy the code\nabove in lifespan_example.py and run it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app\n")),(0,o.kt)("p",null,"When you run the app, you should see a simmilar output to the one below:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[262292]: Loading the model!\n[262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[262292]: [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...\n[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished\n[262292]: Exiting, clearing model dict!\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.\n")),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have defined a lifespan 
handler and passed to our\nFastKafka app."),(0,o.kt)("p",null,"Some important points are:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Lifespan handler is implemented as\n",(0,o.kt)("a",{parentName:"li",href:"https://docs.python.org/3/library/contextlib.html#contextlib.asynccontextmanager"},"AsyncContextManager")),(0,o.kt)("li",{parentName:"ol"},"Code ",(0,o.kt)("strong",{parentName:"li"},"before")," yield in lifespan will be executed ",(0,o.kt)("strong",{parentName:"li"},"before"),"\napplication ",(0,o.kt)("strong",{parentName:"li"},"startup")),(0,o.kt)("li",{parentName:"ol"},"Code ",(0,o.kt)("strong",{parentName:"li"},"after")," yield in lifespan will be executed ",(0,o.kt)("strong",{parentName:"li"},"after"),"\napplication ",(0,o.kt)("strong",{parentName:"li"},"shutdown")),(0,o.kt)("li",{parentName:"ol"},"You can pass your lifespan handler to FastKafka app on\ninitialisation by passing a ",(0,o.kt)("inlineCode",{parentName:"li"},"lifespan")," argument")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d73efefc.77589437.js b/assets/js/d73efefc.77589437.js new file mode 100644 index 0000000..868289e --- /dev/null +++ b/assets/js/d73efefc.77589437.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[2353],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},d=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(a),u=r,f=c["".concat(s,".").concat(u)]||c[u]||k[u]||o;return a?n.createElement(f,i(i({ref:t},d),{},{components:a})):n.createElement(f,i({ref:t},d))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:r,i[1]=l;for(var p=2;p<o;p++)i[p]=a[p];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},3130:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>k,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},i=void 
0,l={unversionedId:"api/fastkafka/testing/LocalRedpandaBroker",id:"version-0.7.0/api/fastkafka/testing/LocalRedpandaBroker",title:"LocalRedpandaBroker",description:"fastkafka.testing.LocalRedpandaBroker {fastkafka.testing.LocalRedpandaBroker}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/testing/LocalRedpandaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/LocalRedpandaBroker",permalink:"/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"ApacheKafkaBroker",permalink:"/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker"},next:{title:"Tester",permalink:"/docs/0.7.0/api/fastkafka/testing/Tester"}},s={},p=[{value:"<code>fastkafka.testing.LocalRedpandaBroker</code>",id:"fastkafka.testing.LocalRedpandaBroker",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>get_service_config_string</code>",id:"get_service_config_string",level:3},{value:"<code>start</code>",id:"start",level:3},{value:"<code>stop</code>",id:"stop",level:3}],d={toc:p},c="wrapper";function k(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.testing.LocalRedpandaBroker"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.LocalRedpandaBroker")),(0,r.kt)("p",null,"LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing."),(0,r.kt)("h3",{id:"init"},(0,r.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None")),(0,r.kt)("p",null,"Initialises the LocalRedpandaBroker 
object"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"tag"),": Tag of Redpanda image to use to start container"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"seastar_core"),": Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"memory"),": The amount of memory to make available to Redpanda"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"mode"),": Mode to use to load configuration properties in container"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"default_log_level"),": Log levels to use for Redpanda")),(0,r.kt)("h3",{id:"get_service_config_string"},(0,r.kt)("inlineCode",{parentName:"h3"},"get_service_config_string")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str")),(0,r.kt)("p",null,"Generates a configuration for a service"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"service"),': "redpanda", defines which service to get config string for')),(0,r.kt)("h3",{id:"start"},(0,r.kt)("inlineCode",{parentName:"h3"},"start")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def start(self: fastkafka.testing.LocalRedpandaBroker) -> str")),(0,r.kt)("p",null,"Starts a local redpanda broker instance 
synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Redpanda broker bootstrap server address in string format: add:port")),(0,r.kt)("h3",{id:"stop"},(0,r.kt)("inlineCode",{parentName:"h3"},"stop")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None")),(0,r.kt)("p",null,"Stops a local redpanda broker instance synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"None")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d7dfec52.b5cc8811.js b/assets/js/d7dfec52.b5cc8811.js new file mode 100644 index 0000000..4113ee7 --- /dev/null +++ b/assets/js/d7dfec52.b5cc8811.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8457],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?o(Object(r),!0).forEach((function(t){a(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):o(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function l(e,t){if(null==e)return{};var r,n,a=function(e,t){if(null==e)return{};var r,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var 
o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var i=n.createContext({}),p=function(e){var t=n.useContext(i),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},c=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),f=p(r),k=a,m=f["".concat(i,".").concat(k)]||f[k]||u[k]||o;return r?n.createElement(m,s(s({ref:t},c),{},{components:r})):n.createElement(m,s({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,s=new Array(o);s[0]=k;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[f]="string"==typeof e?e:a,s[1]=l;for(var p=2;p<o;p++)s[p]=r[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,r)}k.displayName="MDXCreateElement"},6322:(e,t,r)=>{r.r(t),r.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var n=r(7462),a=(r(7294),r(3905));const o={},s="run_fastkafka_server_process",l={unversionedId:"cli/run_fastkafka_server_process",id:"cli/run_fastkafka_server_process",title:"run_fastkafka_server_process",description:"Usage:",source:"@site/docs/cli/run_fastkafka_server_process.md",sourceDirName:"cli",slug:"/cli/run_fastkafka_server_process",permalink:"/docs/next/cli/run_fastkafka_server_process",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"fastkafka",permalink:"/docs/next/cli/fastkafka"},next:{title:"LICENSE",permalink:"/docs/next/LICENSE"}},i={},p=[],c={toc:p},f="wrapper";function 
u(e){let{components:t,...r}=e;return(0,a.kt)(f,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"run_fastkafka_server_process"},(0,a.kt)("inlineCode",{parentName:"h1"},"run_fastkafka_server_process")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Usage"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-console"},"$ run_fastkafka_server_process [OPTIONS] APP\n")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"APP"),": Input in the form of 'path:app', where ",(0,a.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,a.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,a.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Options"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. 
","[required]"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d87f7f29.8b85c627.js b/assets/js/d87f7f29.8b85c627.js new file mode 100644 index 0000000..cbae79e --- /dev/null +++ b/assets/js/d87f7f29.8b85c627.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9069],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){o(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,o=function(e,t){if(null==e)return{};var a,n,o={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return 
e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},c=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=p(a),k=o,f=d["".concat(i,".").concat(k)]||d[k]||u[k]||r;return a?n.createElement(f,s(s({ref:t},c),{},{components:a})):n.createElement(f,s({ref:t},c))}));function f(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,s=new Array(r);s[0]=k;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[d]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},6927:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var n=a(7462),o=(a(7294),a(3905));const r={},s="@produces basics",l={unversionedId:"guides/Guide_21_Produces_Basics",id:"guides/Guide_21_Produces_Basics",title:"@produces basics",description:"You can use @produces decorator to produce messages to Kafka topics.",source:"@site/docs/guides/Guide_21_Produces_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_21_Produces_Basics",permalink:"/docs/next/guides/Guide_21_Produces_Basics",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Batch consuming",permalink:"/docs/next/guides/Guide_12_Batch_Consuming"},next:{title:"Defining a partition key",permalink:"/docs/next/guides/Guide_22_Partition_Keys"}},i={},p=[{value:"Import <code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka 
app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a producer function and decorate it with <code>@produces</code>",id:"create-a-producer-function-and-decorate-it-with-produces",level:2},{value:"Instruct the app to start sending HelloWorld messages",id:"instruct-the-app-to-start-sending-helloworld-messages",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic",id:"check-if-the-message-was-sent-to-the-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"produces-basics"},"@produces basics"),(0,o.kt)("p",null,"You can use ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator to produce messages to Kafka topics."),(0,o.kt)("p",null,"In this guide we will create a simple FastKafka app that will produce\nhello world messages to hello_world topic."),(0,o.kt)("h2",{id:"import-fastkafka"},"Import ",(0,o.kt)("a",{parentName:"h2",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,o.kt)("p",null,"To use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator, frist we need to import the base\nFastKafka app to create our application."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,o.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,o.kt)("p",null,"Next, you need to define the structure of the messages you want to send\nto the topic using ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". 
For the guide\nwe\u2019ll stick to something basic, but you are free to define any complex\nmessage structure you wish in your project, just make sure it can be\nJSON encoded."),(0,o.kt)("p",null,"Let\u2019s import ",(0,o.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,o.kt)("inlineCode",{parentName:"p"},"msg")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,o.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,o.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,o.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap server"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,o.kt)("h2",{id:"create-a-producer-function-and-decorate-it-with-produces"},"Create a producer function and decorate it with ",(0,o.kt)("inlineCode",{parentName:"h2"},"@produces")),(0,o.kt)("p",null,"Let\u2019s create a producer function that will produce ",(0,o.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nto ",(0,o.kt)("em",{parentName:"p"},"hello_world")," 
topic:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n")),(0,o.kt)("p",null,"Now you can call your defined function as any normal python function in\nyour code. The side effect of calling the function will be that the\nvalue you are returning will also be sent to a kafka topic."),(0,o.kt)("p",null,"By default, the topic is determined from your function name, the \u201cto","_",'"\nprefix is stripped and what is left over is used as a topic name. I this\ncase, that is ',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("h2",{id:"instruct-the-app-to-start-sending-helloworld-messages"},"Instruct the app to start sending HelloWorld messages"),(0,o.kt)("p",null,"Let\u2019s use ",(0,o.kt)("inlineCode",{parentName:"p"},"@run_in_background")," decorator to instruct our app to send\nHelloWorld messages to hello_world topic every second."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"final-app"},"Final app"),(0,o.kt)("p",null,"Your app code should look like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.produces()\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n\nimport 
asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,o.kt)("h2",{id:"run-the-app"},"Run the app"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'script_file = "producer_example.py"\ncmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"\nmd(\n f"Now we can run the app. Copy the code above in producer_example.py and run it by running\\n```shell\\n{cmd}\\n```"\n)\n')),(0,o.kt)("p",null,"Now we can run the app. Copy the code above in producer_example.py and\nrun it by running"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app\n")),(0,o.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\n[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[84645]: [INFO] fastkafka._application.app: 
_shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.\n")),(0,o.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic"},"Check if the message was sent to the Kafka topic"),(0,o.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic. In your terminal run:'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh -topic=hello_world --from-beginning -bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n")),(0,o.kt)("p",null,'You should see the {\u201cmsg": \u201cHello world!"} messages in your topic.'),(0,o.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,o.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are sending\nthe message to, this is because the ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator determines the\ntopic by default from your function name. The decorator will take your\nfunction name and strip the default \u201cto","_",'" prefix from it and use the\nrest as the topic name. In this example case, the topic is\n',(0,o.kt)("em",{parentName:"p"},"hello_world"),"."),(0,o.kt)("p",null,'!!! warn "New topics"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Kafka producers and application startup will fail if the topics you are producing to don't yet exist. 
Before running the app, make sure that the topics are created.\n")),(0,o.kt)("p",null,"You can choose your custom prefix by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nproduces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(prefix="send_to_")\nasync def send_to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,o.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in produces decorator, like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.produces(topic="my_special_topic")\nasync def to_hello_world(msg: str) -> HelloWorld:\n return HelloWorld(msg=msg)\n')),(0,o.kt)("h2",{id:"message-data"},"Message data"),(0,o.kt)("p",null,"The return value from your function will be translated JSON string and\nthen to bytes and sent to defined Kafka topic. The typing of the return\nvalue is used for generating the documentation for your Kafka app."),(0,o.kt)("p",null,"In this example case, the return value is HelloWorld class which will be\ntranslated into JSON formatted string and then to bytes. The translated\ndata will then be sent to Kafka. 
In the from of:\n",(0,o.kt)("inlineCode",{parentName:"p"},'b\'{"msg":"Hello world!"}\'')))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d9bd3427.e1f1f230.js b/assets/js/d9bd3427.e1f1f230.js new file mode 100644 index 0000000..8b4f6cd --- /dev/null +++ b/assets/js/d9bd3427.e1f1f230.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8796],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},k="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var 
a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),k=l(a),d=r,f=k["".concat(p,".").concat(d)]||k[d]||c[d]||o;return a?n.createElement(f,i(i({ref:t},u),{},{components:a})):n.createElement(f,i({ref:t},u))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=d;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[k]="string"==typeof e?e:r,i[1]=s;for(var l=2;l<o;l++)i[l]=a[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}d.displayName="MDXCreateElement"},7372:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>c,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var n=a(7462),r=(a(7294),a(3905));const o={},i="Defining a partition key",s={unversionedId:"guides/Guide_22_Partition_Keys",id:"version-0.8.0/guides/Guide_22_Partition_Keys",title:"Defining a partition key",description:"Partition keys are used in Apache Kafka to determine which partition a",source:"@site/versioned_docs/version-0.8.0/guides/Guide_22_Partition_Keys.md",sourceDirName:"guides",slug:"/guides/Guide_22_Partition_Keys",permalink:"/docs/guides/Guide_22_Partition_Keys",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@produces basics",permalink:"/docs/guides/Guide_21_Produces_Basics"},next:{title:"Batch producing",permalink:"/docs/guides/Guide_23_Batch_Producing"}},p={},l=[{value:"Return a key from the producing function",id:"return-a-key-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic with the desired key",id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key",level:2}],u={toc:l},k="wrapper";function 
c(e){let{components:t,...a}=e;return(0,r.kt)(k,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"defining-a-partition-key"},"Defining a partition key"),(0,r.kt)("p",null,"Partition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance."),(0,r.kt)("p",null,"You can define your partition keys when using the ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator,\nthis guide will demonstrate to you this feature."),(0,r.kt)("h2",{id:"return-a-key-from-the-producing-function"},"Return a key from the producing function"),(0,r.kt)("p",null,"To define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into\n",(0,r.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/KafkaEvent#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass and set the key value. Check the example below:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n')),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class with\nthe key defined as ",(0,r.kt)("em",{parentName:"p"},"my_key"),". 
So, we wrap the message and key into a\nKafkaEvent class and return it as such."),(0,r.kt)("p",null,"While generating the documentation, the\n",(0,r.kt)("a",{parentName:"p",href:"/docs/api/fastkafka/KafkaEvent#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass will be unwrapped and the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class will be documented in\nthe definition of message type, same way if you didn\u2019t use the key."),(0,r.kt)("p",null,'!!! info "Which key to choose?"'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n")),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("strong",{parentName:"p"},"@producer basics")," guide to return\nthe ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," with our key. 
The final app will look like this (make\nsure you replace the ",(0,r.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key"},"Check if the message was sent to the Kafka topic with the desired key"),(0,r.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic with the defined key. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the ",(0,r.kt)("em",{parentName:"p"},'my_key {\u201cmsg": \u201cHello world!"}')," messages in your\ntopic appearing, the ",(0,r.kt)("em",{parentName:"p"},"my_key")," part of the message is the key that we\ndefined in our producing function."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d9ce81b2.de1fc440.js b/assets/js/d9ce81b2.de1fc440.js new file mode 100644 index 0000000..f91c567 --- /dev/null +++ b/assets/js/d9ce81b2.de1fc440.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6803],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>k});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function r(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){i(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function l(e,t){if(null==e)return{};var n,a,i=function(e,t){if(null==e)return{};var n,a,i={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var 
s=a.createContext({}),m=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},d=function(e){var t=m(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=m(n),u=i,k=p["".concat(s,".").concat(u)]||p[u]||c[u]||o;return n?a.createElement(k,r(r({ref:t},d),{},{components:n})):a.createElement(k,r({ref:t},d))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,r=new Array(o);r[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:i,r[1]=l;for(var m=2;m<o;m++)r[m]=n[m];return a.createElement.apply(null,r)}return a.createElement.apply(null,n)}u.displayName="MDXCreateElement"},9681:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>m});var a=n(7462),i=(n(7294),n(3905));const o={},r=void 0,l={unversionedId:"api/fastkafka/testing/Tester",id:"version-0.5.0/api/fastkafka/testing/Tester",title:"Tester",description:"fastkafka.testing.Tester 
{fastkafka.testing.Tester}",source:"@site/versioned_docs/version-0.5.0/api/fastkafka/testing/Tester.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/Tester",permalink:"/docs/0.5.0/api/fastkafka/testing/Tester",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LocalRedpandaBroker",permalink:"/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker"},next:{title:"fastkafka",permalink:"/docs/0.5.0/cli/fastkafka"}},s={},m=[{value:"<code>fastkafka.testing.Tester</code>",id:"fastkafka.testing.Tester",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>benchmark</code>",id:"benchmark",level:3},{value:"<code>consumes</code>",id:"consumes",level:3},{value:"<code>create_mocks</code>",id:"create_mocks",level:3},{value:"<code>produces</code>",id:"produces",level:3},{value:"<code>run_in_background</code>",id:"run_in_background",level:3},{value:"<code>using_local_kafka</code>",id:"using_local_kafka",level:3},{value:"<code>using_local_redpanda</code>",id:"using_local_redpanda",level:3}],d={toc:m},p="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(p,(0,a.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.Tester")),(0,i.kt)("h3",{id:"init"},(0,i.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None")),(0,i.kt)("p",null,"Mirror-like object for testing a FastKafka application"),(0,i.kt)("p",null,"Can be used as context 
manager"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,i.kt)("h3",{id:"benchmark"},(0,i.kt)("inlineCode",{parentName:"h3"},"benchmark")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Union[~O, NoneType]]], typing.Callable[[~I], typing.Union[~O, NoneType]]]")),(0,i.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"interval"),": Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sliding_window_size"),": The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated")),(0,i.kt)("h3",{id:"consumes"},(0,i.kt)("inlineCode",{parentName:"h3"},"consumes")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]], typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and 
documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"decoder"),": Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: "on_". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. 
If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string (or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings) that the consumer should contact to bootstrap\ninitial cluster metadata.")),(0,i.kt)("p",null,"This does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ",(0,i.kt)("inlineCode",{parentName:"p"},"localhost:9092"),"."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~.consumer.group_coordinator.GroupCoordinator"),"\nfor logging with respect to consumer group administration. Default:\n",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-{version}")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. 
The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Client request timeout in milliseconds.\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. 
Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. 
It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more information see\n:ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),". Default: None."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying ",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. 
Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Valid values are:\n",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("h3",{id:"create_mocks"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_mocks")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_mocks(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,i.kt)("h3",{id:"produces"},(0,i.kt)("inlineCode",{parentName:"h3"},"produces")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x101ca6040>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x101c80310>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]], typing.Union[typing.Callable[..., 
typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when delivery report for a produced message is received"),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encoder"),": Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. It also accepts custom encoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: "to_". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. 
It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. 
The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. 
Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. 
Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. 
Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. 
If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. 
(See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"))),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": when needed")),(0,i.kt)("h3",{id:"run_in_background"},(0,i.kt)("inlineCode",{parentName:"h3"},"run_in_background")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]")),(0,i.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,i.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,i.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is triggered."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A decorator function that takes a background task as an input and stores it to be run in the backround.")),(0,i.kt)("h3",{id:"using_local_kafka"},(0,i.kt)("inlineCode",{parentName:"h3"},"using_local_kafka")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester")),(0,i.kt)("p",null,"Starts local Kafka broker used by the Tester instance"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where 
the zookeepeer instance will save data"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"An instance of tester with Kafka as broker")),(0,i.kt)("h3",{id:"using_local_redpanda"},(0,i.kt)("inlineCode",{parentName:"h3"},"using_local_redpanda")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester")),(0,i.kt)("p",null,"Starts local Redpanda broker used by the Tester instance"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"tag"),": Tag of Redpanda image to use to start container"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"seastar_core"),": Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"memory"),": The amount of memory to make available to Redpanda"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"mode"),": Mode to use to load configuration properties in container"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"default_log_level"),": Log levels to use for 
Redpanda")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"An instance of tester with Redpanda as broker")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/dbc0f590.302c834a.js b/assets/js/dbc0f590.302c834a.js new file mode 100644 index 0000000..faf2c69 --- /dev/null +++ b/assets/js/dbc0f590.302c834a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[5412],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?o(Object(r),!0).forEach((function(t){a(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):o(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function i(e,t){if(null==e)return{};var r,n,a=function(e,t){if(null==e)return{};var r,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},c=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var 
r=e.components,a=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),f=p(r),k=a,m=f["".concat(l,".").concat(k)]||f[k]||u[k]||o;return r?n.createElement(m,s(s({ref:t},c),{},{components:r})):n.createElement(m,s({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[f]="string"==typeof e?e:a,s[1]=i;for(var p=2;p<o;p++)s[p]=r[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,r)}k.displayName="MDXCreateElement"},8099:(e,t,r)=>{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=r(7462),a=(r(7294),r(3905));const o={},s="run_fastkafka_server_process",i={unversionedId:"cli/run_fastkafka_server_process",id:"version-0.6.0/cli/run_fastkafka_server_process",title:"run_fastkafka_server_process",description:"Usage:",source:"@site/versioned_docs/version-0.6.0/cli/run_fastkafka_server_process.md",sourceDirName:"cli",slug:"/cli/run_fastkafka_server_process",permalink:"/docs/0.6.0/cli/run_fastkafka_server_process",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"fastkafka",permalink:"/docs/0.6.0/cli/fastkafka"},next:{title:"LICENSE",permalink:"/docs/0.6.0/LICENSE"}},l={},p=[],c={toc:p},f="wrapper";function u(e){let{components:t,...r}=e;return(0,a.kt)(f,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"run_fastkafka_server_process"},(0,a.kt)("inlineCode",{parentName:"h1"},"run_fastkafka_server_process")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Usage"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-console"},"$ run_fastkafka_server_process [OPTIONS] 
APP\n")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,a.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,a.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,a.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Options"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class. ","[required]"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/dc75700c.84688a9d.js b/assets/js/dc75700c.84688a9d.js new file mode 100644 index 0000000..9e9f4f5 --- /dev/null +++ b/assets/js/dc75700c.84688a9d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4886],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>m});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?r(Object(a),!0).forEach((function(t){l(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):r(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function o(e,t){if(null==e)return{};var a,n,l=function(e,t){if(null==e)return{};var a,n,l={},r=Object.keys(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n<r.length;n++)a=r[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},k=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,k=o(e,["components","mdxType","originalType","parentName"]),f=p(a),d=l,m=f["".concat(i,".").concat(d)]||f[d]||u[d]||r;return a?n.createElement(m,s(s({ref:t},k),{},{components:a})):n.createElement(m,s({ref:t},k))}));function m(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,s=new Array(r);s[0]=d;var o={};for(var i in t)hasOwnProperty.call(t,i)&&(o[i]=t[i]);o.originalType=e,o[f]="string"==typeof e?e:l,s[1]=o;for(var p=2;p<r;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}d.displayName="MDXCreateElement"},6780:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const 
r={},s="fastkafka",o={unversionedId:"cli/fastkafka",id:"version-0.8.0/cli/fastkafka",title:"fastkafka",description:"Usage:",source:"@site/versioned_docs/version-0.8.0/cli/fastkafka.md",sourceDirName:"cli",slug:"/cli/fastkafka",permalink:"/docs/cli/fastkafka",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Tester",permalink:"/docs/api/fastkafka/testing/Tester"},next:{title:"run_fastkafka_server_process",permalink:"/docs/cli/run_fastkafka_server_process"}},i={},p=[{value:"<code>fastkafka docs</code>",id:"fastkafka-docs",level:2},{value:"<code>fastkafka docs generate</code>",id:"fastkafka-docs-generate",level:3},{value:"<code>fastkafka docs install_deps</code>",id:"fastkafka-docs-install_deps",level:3},{value:"<code>fastkafka docs serve</code>",id:"fastkafka-docs-serve",level:3},{value:"<code>fastkafka run</code>",id:"fastkafka-run",level:2},{value:"<code>fastkafka testing</code>",id:"fastkafka-testing",level:2},{value:"<code>fastkafka testing install_deps</code>",id:"fastkafka-testing-install_deps",level:3}],k={toc:p},f="wrapper";function u(e){let{components:t,...a}=e;return(0,l.kt)(f,(0,n.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h1",{id:"fastkafka"},(0,l.kt)("inlineCode",{parentName:"h1"},"fastkafka")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--install-completion"),": Install completion for the current shell."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--show-completion"),": Show completion for the current shell, to copy it or customize the installation."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and 
exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"docs"),": Commands for managing FastKafka app..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"run"),": Runs Fast Kafka API application"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"testing"),": Commands for managing FastKafka testing")),(0,l.kt)("h2",{id:"fastkafka-docs"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka docs")),(0,l.kt)("p",null,"Commands for managing FastKafka app documentation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"generate"),": Generates documentation for a FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka..."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"serve"),": Generates and serves documentation for a...")),(0,l.kt)("h3",{id:"fastkafka-docs-generate"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs generate")),(0,l.kt)("p",null,"Generates documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs generate [OPTIONS] 
APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created; default is current directory"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka documentation generation"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h3",{id:"fastkafka-docs-serve"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka docs serve")),(0,l.kt)("p",null,"Generates and serves documentation for a FastKafka application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka docs serve [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the 
form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--root-path TEXT"),": root path under which documentation will be created; default is current directory"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--bind TEXT"),": Some info ","[default: 127.0.0.1]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--port INTEGER"),": Some info ","[default: 8000]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-run"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka run")),(0,l.kt)("p",null,"Runs Fast Kafka API application"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka run [OPTIONS] APP\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Arguments"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"APP"),": input in the form of 'path:app', where ",(0,l.kt)("strong",{parentName:"li"},"path")," is the path to a python file and ",(0,l.kt)("strong",{parentName:"li"},"app")," is an object of type ",(0,l.kt)("strong",{parentName:"li"},"FastKafka"),". ","[required]")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--num-workers INTEGER"),": Number of FastKafka instances to run, defaults to number of CPU cores. 
","[default: 64]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--kafka-broker TEXT"),": kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastaKafka class. ","[default: localhost]"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("h2",{id:"fastkafka-testing"},(0,l.kt)("inlineCode",{parentName:"h2"},"fastkafka testing")),(0,l.kt)("p",null,"Commands for managing FastKafka testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing [OPTIONS] COMMAND [ARGS]...\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Commands"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"install_deps"),": Installs dependencies for FastKafka app...")),(0,l.kt)("h3",{id:"fastkafka-testing-install_deps"},(0,l.kt)("inlineCode",{parentName:"h3"},"fastkafka testing install_deps")),(0,l.kt)("p",null,"Installs dependencies for FastKafka app testing"),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Usage"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-console"},"$ fastkafka testing install_deps [OPTIONS]\n")),(0,l.kt)("p",null,(0,l.kt)("strong",{parentName:"p"},"Options"),":"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--help"),": Show this message and exit.")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/dde1ff6e.936ed816.js b/assets/js/dde1ff6e.936ed816.js new file mode 100644 index 0000000..5f2463e --- /dev/null +++ b/assets/js/dde1ff6e.936ed816.js @@ -0,0 
+1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6993],{7085:a=>{a.exports=JSON.parse('{"name":"docusaurus-theme-search-algolia","id":"default"}')}}]); \ No newline at end of file diff --git a/assets/js/de2621c2.5348d14a.js b/assets/js/de2621c2.5348d14a.js new file mode 100644 index 0000000..9885fd2 --- /dev/null +++ b/assets/js/de2621c2.5348d14a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[298],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function l(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?l(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):l(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},l=Object.keys(e);for(n=0;n<l.length;n++)a=l[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n<l.length;n++)a=l[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),u=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var 
a=e.components,r=e.mdxType,l=e.originalType,s=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),p=u(a),m=r,f=p["".concat(s,".").concat(m)]||p[m]||k[m]||l;return a?n.createElement(f,o(o({ref:t},c),{},{components:a})):n.createElement(f,o({ref:t},c))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=a.length,o=new Array(l);o[0]=m;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[p]="string"==typeof e?e:r,o[1]=i;for(var u=2;u<l;u++)o[u]=a[u];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},7082:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>k,frontMatter:()=>l,metadata:()=>i,toc:()=>u});var n=a(7462),r=(a(7294),a(3905));const l={},o=void 0,i={unversionedId:"api/fastkafka/executors/SequentialExecutor",id:"api/fastkafka/executors/SequentialExecutor",title:"SequentialExecutor",description:"fastkafka.executors.SequentialExecutor {fastkafka.executors.SequentialExecutor}",source:"@site/docs/api/fastkafka/executors/SequentialExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/SequentialExecutor",permalink:"/docs/next/api/fastkafka/executors/SequentialExecutor",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"DynamicTaskExecutor",permalink:"/docs/next/api/fastkafka/executors/DynamicTaskExecutor"},next:{title:"ApacheKafkaBroker",permalink:"/docs/next/api/fastkafka/testing/ApacheKafkaBroker"}},s={},u=[{value:"fastkafka.executors.SequentialExecutor",id:"fastkafka.executors.SequentialExecutor",level:2},{value:"<strong>init</strong>",id:"fastkafka._components.task_streaming.SequentialExecutor.init",level:3},{value:"run",id:"fastkafka._components.task_streaming.SequentialExecutor.run",level:3}],c={toc:u},p="wrapper";function 
k(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.executors.SequentialExecutor"},"fastkafka.executors.SequentialExecutor"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/task_streaming.py#L305-L356",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"A class that implements a sequential executor for processing consumer records."),(0,r.kt)("p",null,"The SequentialExecutor class extends the StreamExecutor class and provides functionality\nfor running processing tasks in sequence by awaiting their coroutines."),(0,r.kt)("h3",{id:"fastkafka._components.task_streaming.SequentialExecutor.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/task_streaming.py#L312-L326",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self, throw_exceptions=False, max_buffer_size=100000\n)\n")),(0,r.kt)("p",null,"Create an instance of SequentialExecutor"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"throw_exceptions")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"Flag indicating whether exceptions should be thrown or logged.Defaults to 
False."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"max_buffer_size")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Maximum buffer size for the memory object stream.Defaults to 100_000."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"100000"))))),(0,r.kt)("h3",{id:"fastkafka._components.task_streaming.SequentialExecutor.run"},"run"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/task_streaming.py#L328-L356",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"run(\n self, is_shutting_down_f, generator, processor\n)\n")),(0,r.kt)("p",null,"Runs the sequential executor."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"is_shutting_down_f")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[], bool]")),(0,r.kt)("td",{parentName:"tr",align:null},"Function to check if the executor is shutting 
down."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"generator")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Generator function for retrieving consumer records."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"processor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]")),(0,r.kt)("td",{parentName:"tr",align:null},"Processor function for processing consumer records."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e109b3ff.16e87717.js b/assets/js/e109b3ff.16e87717.js new file mode 100644 index 0000000..77bc3cf --- /dev/null +++ b/assets/js/e109b3ff.16e87717.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4039],{3905:(e,a,n)=>{n.d(a,{Zo:()=>k,kt:()=>m});var t=n(7294);function o(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a<arguments.length;a++){var 
n=null!=arguments[a]?arguments[a]:{};a%2?r(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function l(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var n,t,o={},r=Object.keys(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},k=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},f=t.forwardRef((function(e,a){var n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(n),f=o,m=c["".concat(i,".").concat(f)]||c[f]||u[f]||r;return n?t.createElement(m,s(s({ref:a},k),{},{components:n})):t.createElement(m,s({ref:a},k))}));function m(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var r=n.length,s=new Array(r);s[0]=f;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=n[p];return t.createElement.apply(null,s)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},231:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var t=n(7462),o=(n(7294),n(3905));const r={},s="First Steps",l={unversionedId:"guides/Guide_02_First_Steps",id:"version-0.6.0/guides/Guide_02_First_Steps",title:"First Steps",description:"Creating a simple Kafka 
consumer app",source:"@site/versioned_docs/version-0.6.0/guides/Guide_02_First_Steps.md",sourceDirName:"guides",slug:"/guides/Guide_02_First_Steps",permalink:"/docs/0.6.0/guides/Guide_02_First_Steps",draft:!1,tags:[],version:"0.6.0",frontMatter:{}},i={},p=[{value:"Creating a simple Kafka consumer app",id:"creating-a-simple-kafka-consumer-app",level:2},{value:"Sending first message to your consumer",id:"sending-first-message-to-your-consumer",level:2},{value:"Creating a hello Kafka producer",id:"creating-a-hello-kafka-producer",level:2},{value:"Recap",id:"recap",level:2}],k={toc:p},c="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(c,(0,t.Z)({},k,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"first-steps"},"First Steps"),(0,o.kt)("h2",{id:"creating-a-simple-kafka-consumer-app"},"Creating a simple Kafka consumer app"),(0,o.kt)("p",null,"For our first demo we will create the simplest possible Kafka consumer\nand run it using \u2018fastkafka run\u2019 command."),(0,o.kt)("p",null,"The consumer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Connect to the Kafka Broker we setup in the Intro guide")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Listen to the hello topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Write any message received from the hello topic to stdout"))),(0,o.kt)("p",null,"To create the consumer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_consumer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass 
HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n print(f"Got data, msg={msg.msg}", flush=True)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,'!!! warning "Remember to flush"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n")),(0,o.kt)("p",null,"To run this consumer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka 
broker up and running on 127.0.0.1:9092\n[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. 
\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n")),(0,o.kt)("p",null,"Now you can interact with your consumer, by sending the messages to the\nsubscribed \u2018hello\u2019 topic, don\u2019t worry, we will cover this in the next\nstep of this guide."),(0,o.kt)("h2",{id:"sending-first-message-to-your-consumer"},"Sending first message to your consumer"),(0,o.kt)("p",null,"After we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly."),(0,o.kt)("p",null,"If you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic."),(0,o.kt)("p",null,"First, connect to your running kafka broker by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"docker run -it kafka /bin/bash\n")),(0,o.kt)("p",null,"Then, when connected to the container, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n")),(0,o.kt)("p",null,"This will open an interactive connection to the hello topic, now you can\nwrite your mesages to 
the topic and they will be consumed by our\nconsumer."),(0,o.kt)("p",null,"In the shell, type:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'{"msg":"hello"}\n')),(0,o.kt)("p",null,"and press enter. This will send a hello message to the topic which will\nbe read by our running consumer and outputed to stdout."),(0,o.kt)("p",null,"Check the output of your consumer (terminal where you ran the \u2018fastkafka\nrun\u2019 command) and confirm that your consumer has read the Kafka message.\nYou shoud see something like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"Got data, msg=hello\n")),(0,o.kt)("h2",{id:"creating-a-hello-kafka-producer"},"Creating a hello Kafka producer"),(0,o.kt)("p",null,"Consuming messages is only a part of this Library functionality, the\nother big part is producing the messages. So, let\u2019s create our first\nkafka producer which will send it\u2019s greetings to our consumer\nperiodically."),(0,o.kt)("p",null,"The producer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Connect to the Kafka Broker we setup in the Intro guide"),(0,o.kt)("li",{parentName:"ol"},"Connect to the hello topic"),(0,o.kt)("li",{parentName:"ol"},"Periodically send a message to the hello world topic")),(0,o.kt)("p",null,"To create the producer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_producer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str 
= Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n logger.info(f"Producing: {msg}")\n return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello(HelloKafkaMsg(msg="hello"))\n await asyncio.sleep(1)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,"To run this producer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[879272]: 
[INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n")),(0,o.kt)("p",null,"Now, while the producer is running, it will send a 
HelloKafkaMsg every\nsecond to the hello kafka topic. If your consumer is still running, you\nshould see the messages appear in its log."),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka consumer using FastKafka"),(0,o.kt)("li",{parentName:"ol"},"Sent a message to our consumer trough Kafka"),(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka producer using FastKafka")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e1584d63.e755620a.js b/assets/js/e1584d63.e755620a.js new file mode 100644 index 0000000..69b2556 --- /dev/null +++ b/assets/js/e1584d63.e755620a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8945],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>f});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function s(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?r(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function l(e,t){if(null==e)return{};var n,o,a=function(e,t){if(null==e)return{};var n,o,a={},r=Object.keys(e);for(o=0;o<r.length;o++)n=r[o],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o<r.length;o++)n=r[o],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=o.createContext({}),c=function(e){var 
t=o.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},u=function(e){var t=c(e.components);return o.createElement(i.Provider,{value:t},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},d=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,r=e.originalType,i=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),p=c(n),d=a,f=p["".concat(i,".").concat(d)]||p[d]||m[d]||r;return n?o.createElement(f,s(s({ref:t},u),{},{components:n})):o.createElement(f,s({ref:t},u))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,s=new Array(r);s[0]=d;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[p]="string"==typeof e?e:a,s[1]=l;for(var c=2;c<r;c++)s[c]=n[c];return o.createElement.apply(null,s)}return o.createElement.apply(null,n)}d.displayName="MDXCreateElement"},680:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>m,frontMatter:()=>r,metadata:()=>l,toc:()=>c});var o=n(7462),a=(n(7294),n(3905));const r={},s="Batch consuming",l={unversionedId:"guides/Guide_12_Batch_Consuming",id:"guides/Guide_12_Batch_Consuming",title:"Batch consuming",description:"If you want to consume data in batches @consumes decorator makes that",source:"@site/docs/guides/Guide_12_Batch_Consuming.md",sourceDirName:"guides",slug:"/guides/Guide_12_Batch_Consuming",permalink:"/docs/next/guides/Guide_12_Batch_Consuming",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@consumes basics",permalink:"/docs/next/guides/Guide_11_Consumes_Basics"},next:{title:"@produces basics",permalink:"/docs/next/guides/Guide_21_Produces_Basics"}},i={},c=[{value:"Consume function with batching",id:"consume-function-with-batching",level:2},{value:"App example",id:"app-example",level:2},{value:"Send the messages to kafka 
topic",id:"send-the-messages-to-kafka-topic",level:2}],u={toc:c},p="wrapper";function m(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,o.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"batch-consuming"},"Batch consuming"),(0,a.kt)("p",null,"If you want to consume data in batches ",(0,a.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator makes that\npossible for you. By typing a consumed msg object as a ",(0,a.kt)("inlineCode",{parentName:"p"},"list")," of\nmessages the consumer will call your consuming function with a batch of\nmessages consumed from a single partition. Let\u2019s demonstrate that now."),(0,a.kt)("h2",{id:"consume-function-with-batching"},"Consume function with batching"),(0,a.kt)("p",null,"To consume messages in batches, you need to wrap you message type into a\nlist and the ",(0,a.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will take care of the rest for you.\nYour consumes function will be called with batches grouped by partition\nnow."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes(auto_offset_reset="earliest")\nasync def on_hello_world(msg: List[HelloWorld]):\n logger.info(f"Got msg batch: {msg}")\n')),(0,a.kt)("h2",{id:"app-example"},"App example"),(0,a.kt)("p",null,"We will modify the app example from ",(0,a.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_11_Consumes_Basics"},"@consumes\nbasics")," guide to consume\n",(0,a.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages batch. 
The final app will look like this (make\nsure you replace the ",(0,a.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,a.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom typing import List\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.consumes(auto_offset_reset="earliest")\nasync def on_hello_world(msg: List[HelloWorld]):\n logger.info(f"Got msg batch: {msg}")\n')),(0,a.kt)("h2",{id:"send-the-messages-to-kafka-topic"},"Send the messages to kafka topic"),(0,a.kt)("p",null,"Lets send a couple of ",(0,a.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages to the ",(0,a.kt)("em",{parentName:"p"},"hello_world")," topic\nand check if our consumer kafka application has logged the received\nmessages batch. In your terminal, run the following command at least two\ntimes to create multiple messages in your kafka queue:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre"},'echo { ^"msg^": ^"Hello world^" }\n')),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell"},'echo { ^"msg^": ^"Hello world^" } | kafka-console-producer.bat --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,a.kt)("p",null,"Now we can run the app. 
Copy the code of the example app in\nconsumer_example.py and run it by running"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,a.kt)("p",null,"You should see the your Kafka messages being logged in batches by your\nconsumer."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e323208f.514adfe6.js b/assets/js/e323208f.514adfe6.js new file mode 100644 index 0000000..bb41e22 --- /dev/null +++ b/assets/js/e323208f.514adfe6.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9777],{3905:(t,e,a)=>{a.d(e,{Zo:()=>k,kt:()=>u});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function i(t){for(var e=1;e<arguments.length;e++){var a=null!=arguments[e]?arguments[e]:{};e%2?l(Object(a),!0).forEach((function(e){r(t,e,a[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(a)):l(Object(a)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(a,e))}))}return t}function o(t,e){if(null==t)return{};var a,n,r=function(t,e){if(null==t)return{};var a,n,r={},l=Object.keys(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n<l.length;n++)a=l[n],e.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var d=n.createContext({}),p=function(t){var e=n.useContext(d),a=e;return t&&(a="function"==typeof t?t(e):i(i({},e),t)),a},k=function(t){var e=p(t.components);return 
n.createElement(d.Provider,{value:e},t.children)},s="mdxType",c={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},m=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,d=t.parentName,k=o(t,["components","mdxType","originalType","parentName"]),s=p(a),m=r,u=s["".concat(d,".").concat(m)]||s[m]||c[m]||l;return a?n.createElement(u,i(i({ref:e},k),{},{components:a})):n.createElement(u,i({ref:e},k))}));function u(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,i=new Array(l);i[0]=m;var o={};for(var d in e)hasOwnProperty.call(e,d)&&(o[d]=e[d]);o.originalType=t,o[s]="string"==typeof t?t:r,i[1]=o;for(var p=2;p<l;p++)i[p]=a[p];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}m.displayName="MDXCreateElement"},3388:(t,e,a)=>{a.r(e),a.d(e,{assets:()=>d,contentTitle:()=>i,default:()=>c,frontMatter:()=>l,metadata:()=>o,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const l={},i=void 0,o={unversionedId:"api/fastkafka/testing/LocalRedpandaBroker",id:"api/fastkafka/testing/LocalRedpandaBroker",title:"LocalRedpandaBroker",description:"fastkafka.testing.LocalRedpandaBroker 
{fastkafka.testing.LocalRedpandaBroker}",source:"@site/docs/api/fastkafka/testing/LocalRedpandaBroker.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/LocalRedpandaBroker",permalink:"/docs/next/api/fastkafka/testing/LocalRedpandaBroker",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"ApacheKafkaBroker",permalink:"/docs/next/api/fastkafka/testing/ApacheKafkaBroker"},next:{title:"Tester",permalink:"/docs/next/api/fastkafka/testing/Tester"}},d={},p=[{value:"fastkafka.testing.LocalRedpandaBroker",id:"fastkafka.testing.LocalRedpandaBroker",level:2},{value:"<strong>init</strong>",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.init",level:3},{value:"get_service_config_string",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.get_service_config_string",level:3},{value:"is_started",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.is_started",level:3},{value:"start",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.start",level:3},{value:"stop",id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.stop",level:3}],k={toc:p},s="wrapper";function c(t){let{components:e,...a}=t;return(0,r.kt)(s,(0,n.Z)({},k,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.testing.LocalRedpandaBroker"},"fastkafka.testing.LocalRedpandaBroker"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/local_redpanda_broker.py#L84-L200",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing."),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/local_redpanda_broker.py#L88-L120",class:"link-to-source",target:"_blank"},"View 
source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self,\n topics=[],\n retries=3,\n apply_nest_asyncio=False,\n listener_port=9092,\n tag='v23.1.2',\n seastar_core=1,\n memory='1G',\n mode='dev-container',\n default_log_level='debug',\n kwargs,\n)\n")),(0,r.kt)("p",null,"Initialises the LocalRedpandaBroker object"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"topics")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Iterable[str]")),(0,r.kt)("td",{parentName:"tr",align:null},"List of topics to create after sucessfull redpanda broker startup"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"[]"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"retries")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Number of retries to create redpanda service"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"3"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"apply_nest_asyncio")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"set to True if running in 
notebook"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"False"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"listener_port")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Port on which the clients (producers and consumers) can connect"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"9092"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"tag")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"Tag of Redpanda image to use to start container"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'v23.1.2'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"seastar_core")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"int")),(0,r.kt)("td",{parentName:"tr",align:null},"Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"1"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"memory")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"The amount of memory to make available to 
Redpanda"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'1G'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"mode")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"Mode to use to load configuration properties in container"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'dev-container'"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"default_log_level")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"Log levels to use for Redpanda"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"'debug'"))))),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.get_service_config_string"},"get_service_config_string"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/local_redpanda_broker.py#L168-L174",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"get_service_config_string(\n self, service, data_dir\n)\n")),(0,r.kt)("p",null,"Generates a configuration for a 
service"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"data_dir")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Path")),(0,r.kt)("td",{parentName:"tr",align:null},"Path to the directory where the zookeepeer instance will save data"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"service")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},'"redpanda", defines which service to get config string for'),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))))),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.is_started"},"is_started"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/local_redpanda_broker.py#L123-L133",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"@property\nis_started(\n self\n)\n")),(0,r.kt)("p",null,"Property indicating whether the LocalRedpandaBroker object is started."),(0,r.kt)("p",null,"The is_started property indicates if the LocalRedpandaBroker object is currently\nin a started state. 
This implies that Redpanda docker container has sucesfully\nstarted and is ready for handling events."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"bool")),(0,r.kt)("td",{parentName:"tr",align:null},"True if the object is started, False otherwise.")))),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.start"},"start"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/local_redpanda_broker.py#L333-L372",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"start(\n self\n)\n")),(0,r.kt)("p",null,"Starts a local redpanda broker instance synchronously"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"str")),(0,r.kt)("td",{parentName:"tr",align:null},"Redpanda broker bootstrap server address in string format: add:port")))),(0,r.kt)("h3",{id:"fastkafka._testing.local_redpanda_broker.LocalRedpandaBroker.stop"},"stop"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_testing/local_redpanda_broker.py#L376-L388",class:"link-to-source",target:"_blank"},"View 
source"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"stop(\n self\n)\n")),(0,r.kt)("p",null,"Stops a local redpanda broker instance synchronously"))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e333f535.1e236bcc.js b/assets/js/e333f535.1e236bcc.js new file mode 100644 index 0000000..2400e46 --- /dev/null +++ b/assets/js/e333f535.1e236bcc.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3776],{3905:(e,t,a)=>{a.d(t,{Zo:()=>k,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),f=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},k=function(e){var t=f(e.components);return n.createElement(p.Provider,{value:t},e.children)},s="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var 
a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),s=f(a),u=r,d=s["".concat(p,".").concat(u)]||s[u]||c[u]||o;return a?n.createElement(d,i(i({ref:t},k),{},{components:a})):n.createElement(d,i({ref:t},k))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=u;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[s]="string"==typeof e?e:r,i[1]=l;for(var f=2;f<o;f++)i[f]=a[f];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},5783:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>f});var n=a(7462),r=(a(7294),a(3905));const o={},i=void 0,l={unversionedId:"api/fastkafka/KafkaEvent",id:"api/fastkafka/KafkaEvent",title:"KafkaEvent",description:"fastkafka.KafkaEvent {fastkafka.KafkaEvent}",source:"@site/docs/api/fastkafka/KafkaEvent.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/KafkaEvent",permalink:"/docs/next/api/fastkafka/KafkaEvent",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/next/api/fastkafka/"},next:{title:"AvroBase",permalink:"/docs/next/api/fastkafka/encoder/AvroBase"}},p={},f=[{value:"fastkafka.KafkaEvent",id:"fastkafka.KafkaEvent",level:2},{value:"<strong>init</strong>",id:"fastkafka.KafkaEvent.init",level:3}],k={toc:f},s="wrapper";function c(e){let{components:t,...a}=e;return(0,r.kt)(s,(0,n.Z)({},k,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.KafkaEvent"},"fastkafka.KafkaEvent"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/main/fastkafka/_components/producer_decorator.py#L38-L48",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"A generic class for representing Kafka events. 
Based on BaseSubmodel, bound to pydantic.BaseModel"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"message")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"BaseSubmodel")),(0,r.kt)("td",{parentName:"tr",align:null},"The message contained in the Kafka event, can be of type pydantic.BaseModel."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[bytes]")),(0,r.kt)("td",{parentName:"tr",align:null},"The optional key used to identify the Kafka event."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))),(0,r.kt)("h3",{id:"fastkafka.KafkaEvent.init"},(0,r.kt)("strong",{parentName:"h3"},"init")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-py"},"__init__(\n self, message, key=None\n)\n")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e4d0ad4d.f67d40a9.js b/assets/js/e4d0ad4d.f67d40a9.js new file mode 100644 index 0000000..c244d01 --- /dev/null +++ b/assets/js/e4d0ad4d.f67d40a9.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1122],{40:a=>{a.exports=JSON.parse('{"pluginId":"default","version":"0.7.1","label":"0.7.1","banner":"unmaintained","badge":true,"noIndex":false,"className":"docs-version-0.7.1","isLast":false,"docsSidebars":{"tutorialSidebar":[{"type":"link","label":"FastKafka","href":"/docs/0.7.1/","docId":"index"},{"type":"category","label":"Guides","items":[{"type":"category","label":"Writing services","items":[{"type":"link","label":"@consumes basics","href":"/docs/0.7.1/guides/Guide_11_Consumes_Basics","docId":"guides/Guide_11_Consumes_Basics"},{"type":"link","label":"Batch consuming","href":"/docs/0.7.1/guides/Guide_12_Batch_Consuming","docId":"guides/Guide_12_Batch_Consuming"},{"type":"link","label":"@produces basics","href":"/docs/0.7.1/guides/Guide_21_Produces_Basics","docId":"guides/Guide_21_Produces_Basics"},{"type":"link","label":"Defining a partition key","href":"/docs/0.7.1/guides/Guide_22_Partition_Keys","docId":"guides/Guide_22_Partition_Keys"},{"type":"link","label":"Batch producing","href":"/docs/0.7.1/guides/Guide_23_Batch_Producing","docId":"guides/Guide_23_Batch_Producing"},{"type":"link","label":"Lifespan Events","href":"/docs/0.7.1/guides/Guide_05_Lifespan_Handler","docId":"guides/Guide_05_Lifespan_Handler"},{"type":"link","label":"Encoding and Decoding Kafka Messages with FastKafka","href":"/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","docId":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka"},{"type":"link","label":"Using multiple Kafka clusters","href":"/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters","docId":"guides/Guide_24_Using_Multiple_Kafka_Clusters"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Testing","items":[{"type":"link","label":"Using Redpanda to test 
FastKafka","href":"/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka","docId":"guides/Guide_31_Using_redpanda_to_test_fastkafka"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Documentation generation","items":[{"type":"link","label":"Deploy FastKafka docs to GitHub Pages","href":"/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow","docId":"guides/Guide_04_Github_Actions_Workflow"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Deployment","items":[{"type":"link","label":"Deploying FastKafka using Docker","href":"/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka","docId":"guides/Guide_30_Using_docker_to_deploy_fastkafka"},{"type":"link","label":"Using FastAPI to Run FastKafka Application","href":"/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application","docId":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Benchmarking","items":[{"type":"link","label":"Benchmarking FastKafka 
app","href":"/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka","docId":"guides/Guide_06_Benchmarking_FastKafka"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"API","items":[{"type":"link","label":"EventMetadata","href":"/docs/0.7.1/api/fastkafka/EventMetadata","docId":"api/fastkafka/EventMetadata"},{"type":"link","label":"FastKafka","href":"/docs/0.7.1/api/fastkafka/","docId":"api/fastkafka/FastKafka"},{"type":"link","label":"KafkaEvent","href":"/docs/0.7.1/api/fastkafka/KafkaEvent","docId":"api/fastkafka/KafkaEvent"},{"type":"category","label":"encoder","items":[{"type":"link","label":"AvroBase","href":"/docs/0.7.1/api/fastkafka/encoder/AvroBase","docId":"api/fastkafka/encoder/AvroBase"},{"type":"link","label":"avro_decoder","href":"/docs/0.7.1/api/fastkafka/encoder/avro_decoder","docId":"api/fastkafka/encoder/avro_decoder"},{"type":"link","label":"avro_encoder","href":"/docs/0.7.1/api/fastkafka/encoder/avro_encoder","docId":"api/fastkafka/encoder/avro_encoder"},{"type":"link","label":"avsc_to_pydantic","href":"/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic","docId":"api/fastkafka/encoder/avsc_to_pydantic"},{"type":"link","label":"json_decoder","href":"/docs/0.7.1/api/fastkafka/encoder/json_decoder","docId":"api/fastkafka/encoder/json_decoder"},{"type":"link","label":"json_encoder","href":"/docs/0.7.1/api/fastkafka/encoder/json_encoder","docId":"api/fastkafka/encoder/json_encoder"}],"collapsed":true,"collapsible":true},{"type":"category","label":"executors","items":[{"type":"link","label":"DynamicTaskExecutor","href":"/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor","docId":"api/fastkafka/executors/DynamicTaskExecutor"},{"type":"link","label":"SequentialExecutor","href":"/docs/0.7.1/api/fastkafka/executors/SequentialExecutor","docId":"api/fastkafka/executors/SequentialExecutor"}],"collapsed":true,"collapsible":true},{"type":"category","label":"testing","items":[{"type":"link","label":"ApacheKaf
kaBroker","href":"/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker","docId":"api/fastkafka/testing/ApacheKafkaBroker"},{"type":"link","label":"LocalRedpandaBroker","href":"/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker","docId":"api/fastkafka/testing/LocalRedpandaBroker"},{"type":"link","label":"Tester","href":"/docs/0.7.1/api/fastkafka/testing/Tester","docId":"api/fastkafka/testing/Tester"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"CLI","items":[{"type":"link","label":"fastkafka","href":"/docs/0.7.1/cli/fastkafka","docId":"cli/fastkafka"},{"type":"link","label":"run_fastkafka_server_process","href":"/docs/0.7.1/cli/run_fastkafka_server_process","docId":"cli/run_fastkafka_server_process"}],"collapsed":true,"collapsible":true},{"type":"link","label":"LICENSE","href":"/docs/0.7.1/LICENSE","docId":"LICENSE"},{"type":"link","label":"Contributing to fastkafka","href":"/docs/0.7.1/CONTRIBUTING","docId":"CONTRIBUTING"},{"type":"link","label":"Release notes","href":"/docs/0.7.1/CHANGELOG","docId":"CHANGELOG"}]},"docs":{"api/fastkafka/encoder/avro_decoder":{"id":"api/fastkafka/encoder/avro_decoder","title":"avro_decoder","description":"fastkafka.encoder.avrodecoder {fastkafka.encoder.avrodecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avro_encoder":{"id":"api/fastkafka/encoder/avro_encoder","title":"avro_encoder","description":"fastkafka.encoder.avroencoder {fastkafka.encoder.avroencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/AvroBase":{"id":"api/fastkafka/encoder/AvroBase","title":"AvroBase","description":"fastkafka.encoder.AvroBase {fastkafka.encoder.AvroBase}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/avsc_to_pydantic":{"id":"api/fastkafka/encoder/avsc_to_pydantic","title":"avsc_to_pydantic","description":"fastkafka.encoder.avsctopydantic 
{fastkafka.encoder.avsctopydantic}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_decoder":{"id":"api/fastkafka/encoder/json_decoder","title":"json_decoder","description":"fastkafka.encoder.jsondecoder {fastkafka.encoder.jsondecoder}","sidebar":"tutorialSidebar"},"api/fastkafka/encoder/json_encoder":{"id":"api/fastkafka/encoder/json_encoder","title":"json_encoder","description":"fastkafka.encoder.jsonencoder {fastkafka.encoder.jsonencoder}","sidebar":"tutorialSidebar"},"api/fastkafka/EventMetadata":{"id":"api/fastkafka/EventMetadata","title":"EventMetadata","description":"fastkafka.EventMetadata {fastkafka.EventMetadata}","sidebar":"tutorialSidebar"},"api/fastkafka/executors/DynamicTaskExecutor":{"id":"api/fastkafka/executors/DynamicTaskExecutor","title":"DynamicTaskExecutor","description":"fastkafka.executors.DynamicTaskExecutor {fastkafka.executors.DynamicTaskExecutor}","sidebar":"tutorialSidebar"},"api/fastkafka/executors/SequentialExecutor":{"id":"api/fastkafka/executors/SequentialExecutor","title":"SequentialExecutor","description":"fastkafka.executors.SequentialExecutor {fastkafka.executors.SequentialExecutor}","sidebar":"tutorialSidebar"},"api/fastkafka/FastKafka":{"id":"api/fastkafka/FastKafka","title":"FastKafka","description":"fastkafka.FastKafka {fastkafka.FastKafka}","sidebar":"tutorialSidebar"},"api/fastkafka/KafkaEvent":{"id":"api/fastkafka/KafkaEvent","title":"KafkaEvent","description":"fastkafka.KafkaEvent {fastkafka.KafkaEvent}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/ApacheKafkaBroker":{"id":"api/fastkafka/testing/ApacheKafkaBroker","title":"ApacheKafkaBroker","description":"fastkafka.testing.ApacheKafkaBroker {fastkafka.testing.ApacheKafkaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/LocalRedpandaBroker":{"id":"api/fastkafka/testing/LocalRedpandaBroker","title":"LocalRedpandaBroker","description":"fastkafka.testing.LocalRedpandaBroker 
{fastkafka.testing.LocalRedpandaBroker}","sidebar":"tutorialSidebar"},"api/fastkafka/testing/Tester":{"id":"api/fastkafka/testing/Tester","title":"Tester","description":"fastkafka.testing.Tester {fastkafka.testing.Tester}","sidebar":"tutorialSidebar"},"CHANGELOG":{"id":"CHANGELOG","title":"Release notes","description":"0.7.0","sidebar":"tutorialSidebar"},"cli/fastkafka":{"id":"cli/fastkafka","title":"fastkafka","description":"Usage:","sidebar":"tutorialSidebar"},"cli/run_fastkafka_server_process":{"id":"cli/run_fastkafka_server_process","title":"run_fastkafka_server_process","description":"Usage:","sidebar":"tutorialSidebar"},"CONTRIBUTING":{"id":"CONTRIBUTING","title":"Contributing to fastkafka","description":"First off, thanks for taking the time to contribute! \u2764\ufe0f","sidebar":"tutorialSidebar"},"guides/Guide_00_FastKafka_Demo":{"id":"guides/Guide_00_FastKafka_Demo","title":"FastKafka tutorial","description":"FastKafka is a powerful and easy-to-use"},"guides/Guide_01_Intro":{"id":"guides/Guide_01_Intro","title":"Intro","description":"This tutorial will show you how to use FastKafkaAPI, step by"},"guides/Guide_02_First_Steps":{"id":"guides/Guide_02_First_Steps","title":"First Steps","description":"Creating a simple Kafka consumer app"},"guides/Guide_03_Authentication":{"id":"guides/Guide_03_Authentication","title":"Authentication","description":"TLS Authentication"},"guides/Guide_04_Github_Actions_Workflow":{"id":"guides/Guide_04_Github_Actions_Workflow","title":"Deploy FastKafka docs to GitHub Pages","description":"Getting started","sidebar":"tutorialSidebar"},"guides/Guide_05_Lifespan_Handler":{"id":"guides/Guide_05_Lifespan_Handler","title":"Lifespan Events","description":"Did you know that you can define some special code that runs before and","sidebar":"tutorialSidebar"},"guides/Guide_06_Benchmarking_FastKafka":{"id":"guides/Guide_06_Benchmarking_FastKafka","title":"Benchmarking FastKafka 
app","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka":{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","title":"Encoding and Decoding Kafka Messages with FastKafka","description":"Prerequisites","sidebar":"tutorialSidebar"},"guides/Guide_11_Consumes_Basics":{"id":"guides/Guide_11_Consumes_Basics","title":"@consumes basics","description":"You can use @consumes decorator to consume messages from Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_12_Batch_Consuming":{"id":"guides/Guide_12_Batch_Consuming","title":"Batch consuming","description":"If you want to consume data in batches @consumes decorator makes that","sidebar":"tutorialSidebar"},"guides/Guide_21_Produces_Basics":{"id":"guides/Guide_21_Produces_Basics","title":"@produces basics","description":"You can use @produces decorator to produce messages to Kafka topics.","sidebar":"tutorialSidebar"},"guides/Guide_22_Partition_Keys":{"id":"guides/Guide_22_Partition_Keys","title":"Defining a partition key","description":"Partition keys are used in Apache Kafka to determine which partition a","sidebar":"tutorialSidebar"},"guides/Guide_23_Batch_Producing":{"id":"guides/Guide_23_Batch_Producing","title":"Batch producing","description":"If you want to send your data in batches @produces decorator makes","sidebar":"tutorialSidebar"},"guides/Guide_24_Using_Multiple_Kafka_Clusters":{"id":"guides/Guide_24_Using_Multiple_Kafka_Clusters","title":"Using multiple Kafka clusters","description":"Ready to take your FastKafka app to the next level? 
This guide shows you","sidebar":"tutorialSidebar"},"guides/Guide_30_Using_docker_to_deploy_fastkafka":{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","title":"Deploying FastKafka using Docker","description":"Building a Docker Image","sidebar":"tutorialSidebar"},"guides/Guide_31_Using_redpanda_to_test_fastkafka":{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","title":"Using Redpanda to test FastKafka","description":"What is FastKafka?","sidebar":"tutorialSidebar"},"guides/Guide_32_Using_fastapi_to_run_fastkafka_application":{"id":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application","title":"Using FastAPI to Run FastKafka Application","description":"When deploying a FastKafka application, the default approach is to","sidebar":"tutorialSidebar"},"index":{"id":"index","title":"FastKafka","description":"Effortless Kafka integration for your web services","sidebar":"tutorialSidebar"},"LICENSE":{"id":"LICENSE","title":"LICENSE","description":"Apache License","sidebar":"tutorialSidebar"}}}')}}]); \ No newline at end of file diff --git a/assets/js/e56c502c.7f764e63.js b/assets/js/e56c502c.7f764e63.js new file mode 100644 index 0000000..a710080 --- /dev/null +++ b/assets/js/e56c502c.7f764e63.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[1202],{3905:(e,r,t)=>{t.d(r,{Zo:()=>p,kt:()=>k});var a=t(7294);function n(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function o(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);r&&(a=a.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,a)}return t}function c(e){for(var r=1;r<arguments.length;r++){var 
t=null!=arguments[r]?arguments[r]:{};r%2?o(Object(t),!0).forEach((function(r){n(e,r,t[r])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):o(Object(t)).forEach((function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r))}))}return e}function i(e,r){if(null==e)return{};var t,a,n=function(e,r){if(null==e)return{};var t,a,n={},o=Object.keys(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||(n[t]=e[t]);return n}(e,r);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var s=a.createContext({}),f=function(e){var r=a.useContext(s),t=r;return e&&(t="function"==typeof e?e(r):c(c({},r),e)),t},p=function(e){var r=f(e.components);return a.createElement(s.Provider,{value:r},e.children)},l="mdxType",d={inlineCode:"code",wrapper:function(e){var r=e.children;return a.createElement(a.Fragment,{},r)}},u=a.forwardRef((function(e,r){var t=e.components,n=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),l=f(t),u=n,k=l["".concat(s,".").concat(u)]||l[u]||d[u]||o;return t?a.createElement(k,c(c({ref:r},p),{},{components:t})):a.createElement(k,c({ref:r},p))}));function k(e,r){var t=arguments,n=r&&r.mdxType;if("string"==typeof e||n){var o=t.length,c=new Array(o);c[0]=u;var i={};for(var s in r)hasOwnProperty.call(r,s)&&(i[s]=r[s]);i.originalType=e,i[l]="string"==typeof e?e:n,c[1]=i;for(var f=2;f<o;f++)c[f]=t[f];return a.createElement.apply(null,c)}return a.createElement.apply(null,t)}u.displayName="MDXCreateElement"},8988:(e,r,t)=>{t.r(r),t.d(r,{assets:()=>s,contentTitle:()=>c,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>f});var a=t(7462),n=(t(7294),t(3905));const o={},c=void 0,i={unversionedId:"api/fastkafka/encoder/AvroBase",id:"version-0.7.0/api/fastkafka/encoder/AvroBase",title:"AvroBase",description:"fastkafka.encoder.AvroBase 
{fastkafka.encoder.AvroBase}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/AvroBase.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/AvroBase",permalink:"/docs/0.7.0/api/fastkafka/encoder/AvroBase",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"KafkaEvent",permalink:"/docs/0.7.0/api/fastkafka/KafkaEvent"},next:{title:"avro_decoder",permalink:"/docs/0.7.0/api/fastkafka/encoder/avro_decoder"}},s={},f=[{value:"<code>fastkafka.encoder.AvroBase</code>",id:"fastkafka.encoder.AvroBase",level:2}],p={toc:f},l="wrapper";function d(e){let{components:r,...t}=e;return(0,n.kt)(l,(0,a.Z)({},p,t,{components:r,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.encoder.AvroBase"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.AvroBase")),(0,n.kt)("p",null,"This is base pydantic class that will add some methods"))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e6eb5527.890dde78.js b/assets/js/e6eb5527.890dde78.js new file mode 100644 index 0000000..4296fdf --- /dev/null +++ b/assets/js/e6eb5527.890dde78.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6964],{3905:(e,t,n)=>{n.d(t,{Zo:()=>l,kt:()=>k});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function c(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function 
s(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),d=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):c(c({},t),e)),n},l=function(e){var t=d(e.components);return r.createElement(i.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,l=s(e,["components","mdxType","originalType","parentName"]),p=d(n),u=a,k=p["".concat(i,".").concat(u)]||p[u]||f[u]||o;return n?r.createElement(k,c(c({ref:t},l),{},{components:n})):r.createElement(k,c({ref:t},l))}));function k(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,c=new Array(o);c[0]=u;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[p]="string"==typeof e?e:a,c[1]=s;for(var d=2;d<o;d++)c[d]=n[d];return r.createElement.apply(null,c)}return r.createElement.apply(null,n)}u.displayName="MDXCreateElement"},9190:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>s,toc:()=>d});var r=n(7462),a=(n(7294),n(3905));const o={},c=void 0,s={unversionedId:"api/fastkafka/encoder/json_decoder",id:"version-0.7.1/api/fastkafka/encoder/json_decoder",title:"json_decoder",description:"fastkafka.encoder.jsondecoder 
{fastkafka.encoder.jsondecoder}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/json_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_decoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/json_decoder",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avsc_to_pydantic",permalink:"/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic"},next:{title:"json_encoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/json_encoder"}},i={},d=[{value:"<code>fastkafka.encoder.json_decoder</code>",id:"fastkafka.encoder.json_decoder",level:2},{value:"<code>json_decoder</code>",id:"json_decoder",level:3}],l={toc:d},p="wrapper";function f(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},l,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.encoder.json_decoder"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.json_decoder")),(0,a.kt)("h3",{id:"json_decoder"},(0,a.kt)("inlineCode",{parentName:"h3"},"json_decoder")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def json_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any")),(0,a.kt)("p",null,"Decoder to decode json string in bytes to pydantic model instance"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"raw_msg"),": Bytes message received from Kafka topic"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"cls"),": Pydantic class; This pydantic class will be used to construct instance of same class")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"An instance of given pydantic class")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e7ab2684.124a91a6.js b/assets/js/e7ab2684.124a91a6.js new file mode 100644 index 0000000..d0185f5 --- /dev/null +++ 
b/assets/js/e7ab2684.124a91a6.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[6147],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>k});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){i(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function r(e,t){if(null==e)return{};var n,a,i=function(e,t){if(null==e)return{};var n,a,i={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var s=a.createContext({}),m=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=m(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,o=e.originalType,s=e.parentName,d=r(e,["components","mdxType","originalType","parentName"]),p=m(n),u=i,k=p["".concat(s,".").concat(u)]||p[u]||c[u]||o;return n?a.createElement(k,l(l({ref:t},d),{},{components:n})):a.createElement(k,l({ref:t},d))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,l=new 
Array(o);l[0]=u;var r={};for(var s in t)hasOwnProperty.call(t,s)&&(r[s]=t[s]);r.originalType=e,r[p]="string"==typeof e?e:i,l[1]=r;for(var m=2;m<o;m++)l[m]=n[m];return a.createElement.apply(null,l)}return a.createElement.apply(null,n)}u.displayName="MDXCreateElement"},1802:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>c,frontMatter:()=>o,metadata:()=>r,toc:()=>m});var a=n(7462),i=(n(7294),n(3905));const o={},l=void 0,r={unversionedId:"api/fastkafka/FastKafka",id:"version-0.6.0/api/fastkafka/FastKafka",title:"FastKafka",description:"fastkafka.FastKafka {fastkafka.FastKafka}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/FastKafka.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/",permalink:"/docs/0.6.0/api/fastkafka/",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"EventMetadata",permalink:"/docs/0.6.0/api/fastkafka/EventMetadata"},next:{title:"KafkaEvent",permalink:"/docs/0.6.0/api/fastkafka/KafkaEvent"}},s={},m=[{value:"<code>fastkafka.FastKafka</code>",id:"fastkafka.FastKafka",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>benchmark</code>",id:"benchmark",level:3},{value:"<code>consumes</code>",id:"consumes",level:3},{value:"<code>create_mocks</code>",id:"create_mocks",level:3},{value:"<code>produces</code>",id:"produces",level:3},{value:"<code>run_in_background</code>",id:"run_in_background",level:3}],d={toc:m},p="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(p,(0,a.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"h2"},"fastkafka.FastKafka")),(0,i.kt)("h3",{id:"init"},(0,i.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Dict[str, Any], 
root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]] = None, loop=None, client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fa3e2864f70>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fa3e1879090>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None")),(0,i.kt)("p",null,"Creates FastKafka application"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"title"),": optional title for the documentation. If None,\nthe title will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": optional description for the documentation. 
If\nNone, the description will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"version"),": optional version for the documentation. If None,\nthe version will be set to empty string"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"contact"),": optional contact for the documentation. If None, the\ncontact will be set to placeholder values:\nname='Author' url=HttpUrl(' ",(0,i.kt)("a",{parentName:"li",href:"https://www.google.com"},"https://www.google.com")," ', ) email='",(0,i.kt)("a",{parentName:"li",href:"mailto:noreply@gmail.com"},"noreply@gmail.com"),"'"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": dictionary describing kafka brokers used for\ngenerating documentation"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"root_path"),": path to where documentation will be created"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"lifespan"),": asynccontextmanager that is used for setting lifespan hooks.\n",(0,i.kt)("strong",{parentName:"li"},"aenter")," is called before app start and ",(0,i.kt)("strong",{parentName:"li"},"aexit")," after app stop.\nThe lifespan is called whe application is started as async context\nmanager, e.g.:",(0,i.kt)("inlineCode",{parentName:"li"},"async with kafka_app...")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. 
No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). 
Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. (See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. 
The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. 
This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. 
Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. 
If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("h3",{id:"benchmark"},(0,i.kt)("inlineCode",{parentName:"h3"},"benchmark")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]")),(0,i.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"interval"),": Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sliding_window_size"),": The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated")),(0,i.kt)("h3",{id:"consumes"},(0,i.kt)("inlineCode",{parentName:"h3"},"consumes")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]], typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], 
typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"decoder"),": Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. It also accepts custom decoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"executor"),': Type of executor to choose for consuming tasks. Avaliable options\nare "SequentialExecutor" and "DynamicTaskExecutor". The default option is\n"SequentialExecutor" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n"DynamicTaskExecutor" which will wrap the consuming functions into tasks\nand run them in on asyncio loop in background. 
This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: "on_". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string (or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings) that the consumer should contact to bootstrap\ninitial cluster metadata.")),(0,i.kt)("p",null,"This does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ",(0,i.kt)("inlineCode",{parentName:"p"},"localhost:9092"),"."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. 
Also\nsubmitted to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~.consumer.group_coordinator.GroupCoordinator"),"\nfor logging with respect to consumer group administration. Default:\n",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-{version}")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. 
Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Client request timeout in milliseconds.\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. 
Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. 
When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. 
The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more information see\n:ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),". Default: None."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying ",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. 
Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values are:\n",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. (See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("h3",{id:"create_mocks"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_mocks")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_mocks(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,i.kt)("h3",{id:"produces"},(0,i.kt)("inlineCode",{parentName:"h3"},"produces")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fa3e2864f70>, 
key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fa3e1879090>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when delivery report for a produced message is received"),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the producer will send returned values from\nthe 
decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encoder"),": Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. It also accepts custom encoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: "to_". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. 
No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). 
Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. 
(See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"))),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": when needed")),(0,i.kt)("h3",{id:"run_in_background"},(0,i.kt)("inlineCode",{parentName:"h3"},"run_in_background")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]")),(0,i.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,i.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,i.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is triggered."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A decorator function that takes a background task as an input and stores it to be run in the backround.")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e8ae88bc.3ee8c934.js b/assets/js/e8ae88bc.3ee8c934.js new file mode 100644 index 0000000..a1a9167 --- /dev/null +++ b/assets/js/e8ae88bc.3ee8c934.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[920],{3905:(a,e,n)=>{n.d(e,{Zo:()=>k,kt:()=>m});var t=n(7294);function i(a,e,n){return e in a?Object.defineProperty(a,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):a[e]=n,a}function o(a,e){var n=Object.keys(a);if(Object.getOwnPropertySymbols){var 
t=Object.getOwnPropertySymbols(a);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),n.push.apply(n,t)}return n}function r(a){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?o(Object(n),!0).forEach((function(e){i(a,e,n[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(n,e))}))}return a}function s(a,e){if(null==a)return{};var n,t,i=function(a,e){if(null==a)return{};var n,t,i={},o=Object.keys(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||(i[n]=a[n]);return i}(a,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(a,n)&&(i[n]=a[n])}return i}var p=t.createContext({}),l=function(a){var e=t.useContext(p),n=e;return a&&(n="function"==typeof a?a(e):r(r({},e),a)),n},k=function(a){var e=l(a.components);return t.createElement(p.Provider,{value:e},a.children)},c="mdxType",d={inlineCode:"code",wrapper:function(a){var e=a.children;return t.createElement(t.Fragment,{},e)}},f=t.forwardRef((function(a,e){var n=a.components,i=a.mdxType,o=a.originalType,p=a.parentName,k=s(a,["components","mdxType","originalType","parentName"]),c=l(n),f=i,m=c["".concat(p,".").concat(f)]||c[f]||d[f]||o;return n?t.createElement(m,r(r({ref:e},k),{},{components:n})):t.createElement(m,r({ref:e},k))}));function m(a,e){var n=arguments,i=e&&e.mdxType;if("string"==typeof a||i){var o=n.length,r=new Array(o);r[0]=f;var s={};for(var p in e)hasOwnProperty.call(e,p)&&(s[p]=e[p]);s.originalType=a,s[c]="string"==typeof a?a:i,r[1]=s;for(var l=2;l<o;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},4854:(a,e,n)=>{n.r(e),n.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var 
t=n(7462),i=(n(7294),n(3905));const o={},r="Benchmarking FastKafka app",s={unversionedId:"guides/Guide_06_Benchmarking_FastKafka",id:"version-0.6.0/guides/Guide_06_Benchmarking_FastKafka",title:"Benchmarking FastKafka app",description:"Prerequisites",source:"@site/versioned_docs/version-0.6.0/guides/Guide_06_Benchmarking_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_06_Benchmarking_FastKafka",permalink:"/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploying FastKafka using Docker",permalink:"/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka"},next:{title:"EventMetadata",permalink:"/docs/0.6.0/api/fastkafka/EventMetadata"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Starting Kafka",id:"starting-kafka",level:3},{value:"Installing Java and Kafka",id:"installing-java-and-kafka",level:4},{value:"Creating configuration for Zookeeper and Kafka",id:"creating-configuration-for-zookeeper-and-kafka",level:4},{value:"Starting Zookeeper and Kafka",id:"starting-zookeeper-and-kafka",level:4},{value:"Creating topics in Kafka",id:"creating-topics-in-kafka",level:4},{value:"Populating topics with dummy data",id:"populating-topics-with-dummy-data",level:4},{value:"Benchmarking FastKafka",id:"benchmarking-fastkafka",level:3}],k={toc:l},c="wrapper";function d(a){let{components:e,...n}=a;return(0,i.kt)(c,(0,t.Z)({},k,n,{components:e,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"benchmarking-fastkafka-app"},"Benchmarking FastKafka app"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"To benchmark a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nproject, you will need the following:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library built 
with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"."),(0,i.kt)("li",{parentName:"ol"},"A running ",(0,i.kt)("inlineCode",{parentName:"li"},"Kafka")," instance to benchmark the FastKafka application\nagainst.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import 
FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nhas a decorator for benchmarking which is appropriately called as\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark"),". 
Let\u2019s edit our ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and add the\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," decorator to the consumes method."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\n@kafka_app.benchmark(interval=1, 
sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"Here we are conducting a benchmark of a function that consumes data from\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic with an interval of 1 second and a sliding window\nsize of 5."),(0,i.kt)("p",null,"This ",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," method uses the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," parameter to calculate the\nresults over a specific time period, and the ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size"),"\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation."),(0,i.kt)("p",null,"This benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement."),(0,i.kt)("h3",{id:"starting-kafka"},"Starting Kafka"),(0,i.kt)("p",null,"If you already have a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," running somewhere, then you can skip this\nstep."),(0,i.kt)("p",null,"Please keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself."),(0,i.kt)("h4",{id:"installing-java-and-kafka"},"Installing Java and Kafka"),(0,i.kt)("p",null,"We need a working ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),"instance to benchmark 
our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, and to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," we need ",(0,i.kt)("inlineCode",{parentName:"p"},"Java"),". Thankfully,\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\ncomes with a CLI to install both ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," on our machine."),(0,i.kt)("p",null,"So, let\u2019s install ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," by executing the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka testing install_deps\n")),(0,i.kt)("p",null,"The above command will extract ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),' scripts at the location\n\u201c\\$HOME/.local/kafka_2.13-3.3.2" on your machine.'),(0,i.kt)("h4",{id:"creating-configuration-for-zookeeper-and-kafka"},"Creating configuration for Zookeeper and Kafka"),(0,i.kt)("p",null,"Now we need to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," separately, and to start\nthem we need ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," files."),(0,i.kt)("p",null,"Let\u2019s create a folder inside the folder where ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," scripts were\nextracted and change directory into it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n")),(0,i.kt)("p",null,"Let\u2019s create a file called 
",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n")),(0,i.kt)("p",null,"Similarly, let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"broker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n")),(0,i.kt)("h4",{id:"starting-zookeeper-and-kafka"},"Starting Zookeeper and Kafka"),(0,i.kt)("p",null,"We need two different terminals to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," in one and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," in\nanother. 
Let\u2019s open a new terminal and run the following commands to\nstart ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n")),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," is up and running, open a new terminal and execute the\nfollwing commands to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n")),(0,i.kt)("p",null,"Now we have both ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," up and running."),(0,i.kt)("h4",{id:"creating-topics-in-kafka"},"Creating topics in Kafka"),(0,i.kt)("p",null,"In a new terminal, please execute the following command to create\nnecessary topics in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n")),(0,i.kt)("h4",{id:"populating-topics-with-dummy-data"},"Populating topics with dummy data"),(0,i.kt)("p",null,"To benchmark our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, we need some data in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," topics."),(0,i.kt)("p",null,"In the same terminal, let\u2019s create some 
dummy data:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},'yes \'{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}\' | head -n 1000000 > /tmp/test_data\n')),(0,i.kt)("p",null,"This command will create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"test_data")," in the ",(0,i.kt)("inlineCode",{parentName:"p"},"tmp")," folder\nwith one million rows of text. This will act as dummy data to populate\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic."),(0,i.kt)("p",null,"Let\u2019s populate the created topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," with the dummy data which\nwe created above:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n")),(0,i.kt)("p",null,"Now our topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again."),(0,i.kt)("h3",{id:"benchmarking-fastkafka"},"Benchmarking FastKafka"),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," are ready, benchmarking\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is as simple as running the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n")),(0,i.kt)("p",null,"This command will start the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp and begin consuming messages from ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),", which we spun up earlier.\nAdditionally, the same command will output all of the benchmark\nthroughputs based on the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size")," values."),(0,i.kt)("p",null,"The output for the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 
23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 
23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n")),(0,i.kt)("p",null,"Based on the output, when using 1 worker, our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp achieved a ",(0,i.kt)("inlineCode",{parentName:"p"},"throughput")," of 
93k messages per second and an\n",(0,i.kt)("inlineCode",{parentName:"p"},"average throughput")," of 93k messages per second."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e968e69e.7145b4d3.js b/assets/js/e968e69e.7145b4d3.js new file mode 100644 index 0000000..ba5f18a --- /dev/null +++ b/assets/js/e968e69e.7145b4d3.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3374],{3905:(a,e,n)=>{n.d(e,{Zo:()=>k,kt:()=>m});var t=n(7294);function i(a,e,n){return e in a?Object.defineProperty(a,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):a[e]=n,a}function o(a,e){var n=Object.keys(a);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(a);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),n.push.apply(n,t)}return n}function r(a){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?o(Object(n),!0).forEach((function(e){i(a,e,n[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(n,e))}))}return a}function s(a,e){if(null==a)return{};var n,t,i=function(a,e){if(null==a)return{};var n,t,i={},o=Object.keys(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||(i[n]=a[n]);return i}(a,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(a,n)&&(i[n]=a[n])}return i}var p=t.createContext({}),l=function(a){var e=t.useContext(p),n=e;return a&&(n="function"==typeof a?a(e):r(r({},e),a)),n},k=function(a){var e=l(a.components);return t.createElement(p.Provider,{value:e},a.children)},c="mdxType",d={inlineCode:"code",wrapper:function(a){var e=a.children;return t.createElement(t.Fragment,{},e)}},f=t.forwardRef((function(a,e){var 
n=a.components,i=a.mdxType,o=a.originalType,p=a.parentName,k=s(a,["components","mdxType","originalType","parentName"]),c=l(n),f=i,m=c["".concat(p,".").concat(f)]||c[f]||d[f]||o;return n?t.createElement(m,r(r({ref:e},k),{},{components:n})):t.createElement(m,r({ref:e},k))}));function m(a,e){var n=arguments,i=e&&e.mdxType;if("string"==typeof a||i){var o=n.length,r=new Array(o);r[0]=f;var s={};for(var p in e)hasOwnProperty.call(e,p)&&(s[p]=e[p]);s.originalType=a,s[c]="string"==typeof a?a:i,r[1]=s;for(var l=2;l<o;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},1849:(a,e,n)=>{n.r(e),n.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var t=n(7462),i=(n(7294),n(3905));const o={},r="Benchmarking FastKafka app",s={unversionedId:"guides/Guide_06_Benchmarking_FastKafka",id:"guides/Guide_06_Benchmarking_FastKafka",title:"Benchmarking FastKafka app",description:"Prerequisites",source:"@site/docs/guides/Guide_06_Benchmarking_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_06_Benchmarking_FastKafka",permalink:"/docs/next/guides/Guide_06_Benchmarking_FastKafka",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Using FastAPI to Run FastKafka Application",permalink:"/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application"},next:{title:"EventMetadata",permalink:"/docs/next/api/fastkafka/EventMetadata"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Starting Kafka",id:"starting-kafka",level:3},{value:"Installing Java and Kafka",id:"installing-java-and-kafka",level:4},{value:"Creating configuration for Zookeeper and Kafka",id:"creating-configuration-for-zookeeper-and-kafka",level:4},{value:"Starting Zookeeper and Kafka",id:"starting-zookeeper-and-kafka",level:4},{value:"Creating topics in 
Kafka",id:"creating-topics-in-kafka",level:4},{value:"Populating topics with dummy data",id:"populating-topics-with-dummy-data",level:4},{value:"Benchmarking FastKafka",id:"benchmarking-fastkafka",level:3}],k={toc:l},c="wrapper";function d(a){let{components:e,...n}=a;return(0,i.kt)(c,(0,t.Z)({},k,n,{components:e,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"benchmarking-fastkafka-app"},"Benchmarking FastKafka app"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"To benchmark a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nproject, you will need the following:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library built with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"."),(0,i.kt)("li",{parentName:"ol"},"A running ",(0,i.kt)("inlineCode",{parentName:"li"},"Kafka")," instance to benchmark the FastKafka application\nagainst.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = 
LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nhas a decorator for benchmarking which is appropriately called 
as\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark"),". Let\u2019s edit our ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and add the\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," decorator to the consumes method."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", 
auto_offset_reset="latest")\n@kafka_app.benchmark(interval=1, sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"Here we are conducting a benchmark of a function that consumes data from\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic with an interval of 1 second and a sliding window\nsize of 5."),(0,i.kt)("p",null,"This ",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," method uses the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," parameter to calculate the\nresults over a specific time period, and the ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size"),"\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation."),(0,i.kt)("p",null,"This benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement."),(0,i.kt)("h3",{id:"starting-kafka"},"Starting Kafka"),(0,i.kt)("p",null,"If you already have a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," running somewhere, then you can skip this\nstep."),(0,i.kt)("p",null,"Please keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even the\nKafka configuration itself."),(0,i.kt)("h4",{id:"installing-java-and-kafka"},"Installing Java and Kafka"),(0,i.kt)("p",null,"We need a working ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),"instance to benchmark 
our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, and to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," we need ",(0,i.kt)("inlineCode",{parentName:"p"},"Java"),". Thankfully,\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\ncomes with a CLI to install both ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," on our machine."),(0,i.kt)("p",null,"So, let\u2019s install ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," by executing the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka testing install_deps\n")),(0,i.kt)("p",null,"The above command will extract ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),' scripts at the location\n\u201c\\$HOME/.local/kafka_2.13-3.3.2" on your machine.'),(0,i.kt)("h4",{id:"creating-configuration-for-zookeeper-and-kafka"},"Creating configuration for Zookeeper and Kafka"),(0,i.kt)("p",null,"Now we need to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," separately, and to start\nthem we need ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," files."),(0,i.kt)("p",null,"Let\u2019s create a folder inside the folder where ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," scripts were\nextracted and change directory into it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n")),(0,i.kt)("p",null,"Let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," 
and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n")),(0,i.kt)("p",null,"Similarly, let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"broker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n")),(0,i.kt)("h4",{id:"starting-zookeeper-and-kafka"},"Starting Zookeeper and Kafka"),(0,i.kt)("p",null,"We need two different terminals to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," in one and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," in\nanother. 
Let\u2019s open a new terminal and run the following commands to\nstart ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n")),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," is up and running, open a new terminal and execute the\nfollwing commands to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n")),(0,i.kt)("p",null,"Now we have both ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," up and running."),(0,i.kt)("h4",{id:"creating-topics-in-kafka"},"Creating topics in Kafka"),(0,i.kt)("p",null,"In a new terminal, please execute the following command to create\nnecessary topics in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n")),(0,i.kt)("h4",{id:"populating-topics-with-dummy-data"},"Populating topics with dummy data"),(0,i.kt)("p",null,"To benchmark our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, we need some data in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," topics."),(0,i.kt)("p",null,"In the same terminal, let\u2019s create some 
dummy data:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},'yes \'{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}\' | head -n 1000000 > /tmp/test_data\n')),(0,i.kt)("p",null,"This command will create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"test_data")," in the ",(0,i.kt)("inlineCode",{parentName:"p"},"tmp")," folder\nwith one million rows of text. This will act as dummy data to populate\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic."),(0,i.kt)("p",null,"Let\u2019s populate the created topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," with the dummy data which\nwe created above:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n")),(0,i.kt)("p",null,"Now our topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again."),(0,i.kt)("h3",{id:"benchmarking-fastkafka"},"Benchmarking FastKafka"),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," are ready, benchmarking\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is as simple as running the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n")),(0,i.kt)("p",null,"This command will start the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp and begin consuming messages from ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),", which we spun up earlier.\nAdditionally, the same command will output all of the benchmark\nthroughputs based on the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size")," values."),(0,i.kt)("p",null,"The output for the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 23-04-07 
10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 
10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n")),(0,i.kt)("p",null,"Based on the output, when using 1 worker, our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp achieved a ",(0,i.kt)("inlineCode",{parentName:"p"},"throughput")," of 93k messages 
per second and an\n",(0,i.kt)("inlineCode",{parentName:"p"},"average throughput")," of 93k messages per second."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e97b3564.ead957c7.js b/assets/js/e97b3564.ead957c7.js new file mode 100644 index 0000000..c77ce82 --- /dev/null +++ b/assets/js/e97b3564.ead957c7.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9724],{3905:(e,r,t)=>{t.d(r,{Zo:()=>s,kt:()=>k});var a=t(7294);function n(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function o(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);r&&(a=a.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,a)}return t}function c(e){for(var r=1;r<arguments.length;r++){var t=null!=arguments[r]?arguments[r]:{};r%2?o(Object(t),!0).forEach((function(r){n(e,r,t[r])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):o(Object(t)).forEach((function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r))}))}return e}function d(e,r){if(null==e)return{};var t,a,n=function(e,r){if(null==e)return{};var t,a,n={},o=Object.keys(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||(n[t]=e[t]);return n}(e,r);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)t=o[a],r.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var i=a.createContext({}),l=function(e){var r=a.useContext(i),t=r;return e&&(t="function"==typeof e?e(r):c(c({},r),e)),t},s=function(e){var r=l(e.components);return a.createElement(i.Provider,{value:r},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var r=e.children;return a.createElement(a.Fragment,{},r)}},u=a.forwardRef((function(e,r){var 
t=e.components,n=e.mdxType,o=e.originalType,i=e.parentName,s=d(e,["components","mdxType","originalType","parentName"]),p=l(t),u=n,k=p["".concat(i,".").concat(u)]||p[u]||f[u]||o;return t?a.createElement(k,c(c({ref:r},s),{},{components:t})):a.createElement(k,c({ref:r},s))}));function k(e,r){var t=arguments,n=r&&r.mdxType;if("string"==typeof e||n){var o=t.length,c=new Array(o);c[0]=u;var d={};for(var i in r)hasOwnProperty.call(r,i)&&(d[i]=r[i]);d.originalType=e,d[p]="string"==typeof e?e:n,c[1]=d;for(var l=2;l<o;l++)c[l]=t[l];return a.createElement.apply(null,c)}return a.createElement.apply(null,t)}u.displayName="MDXCreateElement"},6232:(e,r,t)=>{t.r(r),t.d(r,{assets:()=>i,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>d,toc:()=>l});var a=t(7462),n=(t(7294),t(3905));const o={},c=void 0,d={unversionedId:"api/fastkafka/encoder/avro_decoder",id:"version-0.7.1/api/fastkafka/encoder/avro_decoder",title:"avro_decoder",description:"fastkafka.encoder.avrodecoder {fastkafka.encoder.avrodecoder}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/avro_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_decoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/avro_decoder",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"AvroBase",permalink:"/docs/0.7.1/api/fastkafka/encoder/AvroBase"},next:{title:"avro_encoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/avro_encoder"}},i={},l=[{value:"<code>fastkafka.encoder.avro_decoder</code>",id:"fastkafka.encoder.avro_decoder",level:2},{value:"<code>avro_decoder</code>",id:"avro_decoder",level:3}],s={toc:l},p="wrapper";function 
f(e){let{components:r,...t}=e;return(0,n.kt)(p,(0,a.Z)({},s,t,{components:r,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"fastkafka.encoder.avro_decoder"},(0,n.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avro_decoder")),(0,n.kt)("h3",{id:"avro_decoder"},(0,n.kt)("inlineCode",{parentName:"h3"},"avro_decoder")),(0,n.kt)("p",null,(0,n.kt)("inlineCode",{parentName:"p"},"def avro_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any")),(0,n.kt)("p",null,"Decoder to decode avro encoded messages to pydantic model instance"),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"raw_msg"),": Avro encoded bytes message received from Kafka topic"),(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("inlineCode",{parentName:"li"},"cls"),": Pydantic class; This pydantic class will be used to construct instance of same class")),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"Returns"),":"),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"An instance of given pydantic class")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/ebc40d40.75714ad4.js b/assets/js/ebc40d40.75714ad4.js new file mode 100644 index 0000000..83fadf9 --- /dev/null +++ b/assets/js/ebc40d40.75714ad4.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[984],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t<arguments.length;t++){var 
r=null!=arguments[t]?arguments[t]:{};t%2?o(Object(r),!0).forEach((function(t){a(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):o(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function l(e,t){if(null==e)return{};var r,n,a=function(e,t){if(null==e)return{};var r,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)r=o[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var u=n.createContext({}),s=function(e){var t=n.useContext(u),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},c=function(e){var t=s(e.components);return n.createElement(u.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,u=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),p=s(r),k=a,m=p["".concat(u,".").concat(k)]||p[k]||f[k]||o;return r?n.createElement(m,i(i({ref:t},c),{},{components:r})):n.createElement(m,i({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=k;var l={};for(var u in t)hasOwnProperty.call(t,u)&&(l[u]=t[u]);l.originalType=e,l[p]="string"==typeof e?e:a,i[1]=l;for(var s=2;s<o;s++)i[s]=r[s];return n.createElement.apply(null,i)}return n.createElement.apply(null,r)}k.displayName="MDXCreateElement"},4118:(e,t,r)=>{r.r(t),r.d(t,{assets:()=>u,contentTitle:()=>i,default:()=>f,frontMatter:()=>o,metadata:()=>l,toc:()=>s});var n=r(7462),a=(r(7294),r(3905));const o={},i=void 
0,l={unversionedId:"api/fastkafka/executors/SequentialExecutor",id:"version-0.7.1/api/fastkafka/executors/SequentialExecutor",title:"SequentialExecutor",description:"fastkafka.executors.SequentialExecutor {fastkafka.executors.SequentialExecutor}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/executors/SequentialExecutor.md",sourceDirName:"api/fastkafka/executors",slug:"/api/fastkafka/executors/SequentialExecutor",permalink:"/docs/0.7.1/api/fastkafka/executors/SequentialExecutor",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"DynamicTaskExecutor",permalink:"/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor"},next:{title:"ApacheKafkaBroker",permalink:"/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker"}},u={},s=[{value:"<code>fastkafka.executors.SequentialExecutor</code>",id:"fastkafka.executors.SequentialExecutor",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>run</code>",id:"run",level:3}],c={toc:s},p="wrapper";function f(e){let{components:t,...r}=e;return(0,a.kt)(p,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.executors.SequentialExecutor"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.executors.SequentialExecutor")),(0,a.kt)("p",null,"A class that implements a sequential executor for processing consumer records."),(0,a.kt)("p",null,"The SequentialExecutor class extends the StreamExecutor class and provides functionality\nfor running processing tasks in sequence by awaiting their coroutines."),(0,a.kt)("h3",{id:"init"},(0,a.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000) -> None")),(0,a.kt)("p",null,"Create an instance of 
SequentialExecutor"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"throw_exceptions"),": Flag indicating whether exceptions should be thrown or logged.\nDefaults to False."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"max_buffer_size"),": Maximum buffer size for the memory object stream.\nDefaults to 100_000.")),(0,a.kt)("h3",{id:"run"},(0,a.kt)("inlineCode",{parentName:"h3"},"run")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None")),(0,a.kt)("p",null,"Runs the sequential executor."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"is_shutting_down_f"),": Function to check if the executor is shutting down."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"generator"),": Generator function for retrieving consumer records."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"processor"),": Processor function for processing consumer records.")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"None")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/ee2e0a62.ea183c41.js b/assets/js/ee2e0a62.ea183c41.js new file mode 100644 index 0000000..a2a44c8 --- /dev/null +++ b/assets/js/ee2e0a62.ea183c41.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[836],{3905:(e,a,n)=>{n.d(a,{Zo:()=>k,kt:()=>m});var t=n(7294);function o(e,a,n){return a in 
e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a<arguments.length;a++){var n=null!=arguments[a]?arguments[a]:{};a%2?r(Object(n),!0).forEach((function(a){o(e,a,n[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(n,a))}))}return e}function l(e,a){if(null==e)return{};var n,t,o=function(e,a){if(null==e)return{};var n,t,o={},r=Object.keys(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||(o[n]=e[n]);return o}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t<r.length;t++)n=r[t],a.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},k=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},f=t.forwardRef((function(e,a){var n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,k=l(e,["components","mdxType","originalType","parentName"]),c=p(n),f=o,m=c["".concat(i,".").concat(f)]||c[f]||u[f]||r;return n?t.createElement(m,s(s({ref:a},k),{},{components:n})):t.createElement(m,s({ref:a},k))}));function m(e,a){var n=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var r=n.length,s=new Array(r);s[0]=f;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var p=2;p<r;p++)s[p]=n[p];return t.createElement.apply(null,s)}return 
t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},1950:(e,a,n)=>{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var t=n(7462),o=(n(7294),n(3905));const r={},s="First Steps",l={unversionedId:"guides/Guide_02_First_Steps",id:"version-0.7.1/guides/Guide_02_First_Steps",title:"First Steps",description:"Creating a simple Kafka consumer app",source:"@site/versioned_docs/version-0.7.1/guides/Guide_02_First_Steps.md",sourceDirName:"guides",slug:"/guides/Guide_02_First_Steps",permalink:"/docs/0.7.1/guides/Guide_02_First_Steps",draft:!1,tags:[],version:"0.7.1",frontMatter:{}},i={},p=[{value:"Creating a simple Kafka consumer app",id:"creating-a-simple-kafka-consumer-app",level:2},{value:"Sending first message to your consumer",id:"sending-first-message-to-your-consumer",level:2},{value:"Creating a hello Kafka producer",id:"creating-a-hello-kafka-producer",level:2},{value:"Recap",id:"recap",level:2}],k={toc:p},c="wrapper";function u(e){let{components:a,...n}=e;return(0,o.kt)(c,(0,t.Z)({},k,n,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"first-steps"},"First Steps"),(0,o.kt)("h2",{id:"creating-a-simple-kafka-consumer-app"},"Creating a simple Kafka consumer app"),(0,o.kt)("p",null,"For our first demo we will create the simplest possible Kafka consumer\nand run it using \u2018fastkafka run\u2019 command."),(0,o.kt)("p",null,"The consumer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Connect to the Kafka Broker we setup in the Intro guide")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Listen to the hello topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Write any message received from the hello topic to stdout"))),(0,o.kt)("p",null,"To create the consumer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_consumer.py")," and copy the following code to 
it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n \n@kafka_app.consumes()\nasync def on_hello(msg: HelloKafkaMsg):\n print(f"Got data, msg={msg.msg}", flush=True)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,'!!! warning "Remember to flush"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. 
To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.\n")),(0,o.kt)("p",null,"To run this consumer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})\n[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer 
subscribed.\n[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}. \nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.\n")),(0,o.kt)("p",null,"Now you can interact with your consumer, by sending the messages to the\nsubscribed \u2018hello\u2019 topic, don\u2019t worry, we will cover this in the next\nstep of this guide."),(0,o.kt)("h2",{id:"sending-first-message-to-your-consumer"},"Sending first message to your consumer"),(0,o.kt)("p",null,"After we have created and run our first consumer, we should send a\nmessage to it, to make sure it is working properly."),(0,o.kt)("p",null,"If you are using the Kafka setup as described in the Intro guide, you\ncan follow the steps listed here to send a message to the hello topic."),(0,o.kt)("p",null,"First, connect to your running kafka broker by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"docker run -it kafka /bin/bash\n")),(0,o.kt)("p",null,"Then, when connected to the container, 
run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello\n")),(0,o.kt)("p",null,"This will open an interactive connection to the hello topic, now you can\nwrite your mesages to the topic and they will be consumed by our\nconsumer."),(0,o.kt)("p",null,"In the shell, type:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},'{"msg":"hello"}\n')),(0,o.kt)("p",null,"and press enter. This will send a hello message to the topic which will\nbe read by our running consumer and outputed to stdout."),(0,o.kt)("p",null,"Check the output of your consumer (terminal where you ran the \u2018fastkafka\nrun\u2019 command) and confirm that your consumer has read the Kafka message.\nYou shoud see something like this:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"Got data, msg=hello\n")),(0,o.kt)("h2",{id:"creating-a-hello-kafka-producer"},"Creating a hello Kafka producer"),(0,o.kt)("p",null,"Consuming messages is only a part of this Library functionality, the\nother big part is producing the messages. 
So, let\u2019s create our first\nkafka producer which will send it\u2019s greetings to our consumer\nperiodically."),(0,o.kt)("p",null,"The producer will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Connect to the Kafka Broker we setup in the Intro guide"),(0,o.kt)("li",{parentName:"ol"},"Connect to the hello topic"),(0,o.kt)("li",{parentName:"ol"},"Periodically send a message to the hello world topic")),(0,o.kt)("p",null,"To create the producer, first, create a file named"),(0,o.kt)("b",null,"hello_kafka_producer.py")," and copy the following code to it:",(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom os import environ\n\nimport asyncio\nfrom pydantic import BaseModel, Field\n\nfrom fastkafka import FastKafka\nfrom fastkafka._components.logger import get_logger\n\nkafka_server_url = environ["KAFKA_HOSTNAME"]\nkafka_server_port = environ["KAFKA_PORT"]\n\nkafka_brokers = {\n "localhost": {\n "description": "local development kafka",\n "url": kafka_server_url,\n "port": kafka_server_port\n }\n}\n\nclass HelloKafkaMsg(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_app = FastKafka(\n kafka_brokers=kafka_brokers\n)\n\nlogger = get_logger(__name__)\n\n@kafka_app.produces()\nasync def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:\n logger.info(f"Producing: {msg}")\n return msg\n\n@kafka_app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello(HelloKafkaMsg(msg="hello"))\n await asyncio.sleep(1)\n')),(0,o.kt)("p",null,'!!! info "Kafka configuration"'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following comand (e.g. 
in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').\n")),(0,o.kt)("p",null,"To run this producer, in your terminal, run:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app\n")),(0,o.kt)("p",null,"After running the command, you should see something similar to the ouput\nbelow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: 
Producing: msg='hello'\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...\n[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.\n\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.\n")),(0,o.kt)("p",null,"Now, while the producer is running, it will send a HelloKafkaMsg every\nsecond to the hello kafka topic. 
If your consumer is still running, you\nshould see the messages appear in its log."),(0,o.kt)("h2",{id:"recap"},"Recap"),(0,o.kt)("p",null,"In this guide we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka consumer using FastKafka"),(0,o.kt)("li",{parentName:"ol"},"Sent a message to our consumer trough Kafka"),(0,o.kt)("li",{parentName:"ol"},"Created a simple Kafka producer using FastKafka")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/f2954f34.e942d91a.js b/assets/js/f2954f34.e942d91a.js new file mode 100644 index 0000000..94a961c --- /dev/null +++ b/assets/js/f2954f34.e942d91a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7408],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>u});var n=t(7294);function o(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function r(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?s(Object(t),!0).forEach((function(a){o(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function i(e,a){if(null==e)return{};var t,n,o=function(e,a){if(null==e)return{};var t,n,o={},s=Object.keys(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||(o[t]=e[t]);return o}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n<s.length;n++)t=s[n],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),l=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof 
e?e(a):r(r({},a),e)),t},c=function(e){var a=l(e.components);return n.createElement(p.Provider,{value:a},e.children)},k="mdxType",f={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},d=n.forwardRef((function(e,a){var t=e.components,o=e.mdxType,s=e.originalType,p=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),k=l(t),d=o,u=k["".concat(p,".").concat(d)]||k[d]||f[d]||s;return t?n.createElement(u,r(r({ref:a},c),{},{components:t})):n.createElement(u,r({ref:a},c))}));function u(e,a){var t=arguments,o=a&&a.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=d;var i={};for(var p in a)hasOwnProperty.call(a,p)&&(i[p]=a[p]);i.originalType=e,i[k]="string"==typeof e?e:o,r[1]=i;for(var l=2;l<s;l++)r[l]=t[l];return n.createElement.apply(null,r)}return n.createElement.apply(null,t)}d.displayName="MDXCreateElement"},541:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>r,default:()=>f,frontMatter:()=>s,metadata:()=>i,toc:()=>l});var n=t(7462),o=(t(7294),t(3905));const s={},r="FastKafka",i={unversionedId:"index",id:"version-0.5.0/index",title:"FastKafka",description:"Effortless Kafka integration for your web services",source:"@site/versioned_docs/version-0.5.0/index.md",sourceDirName:".",slug:"/",permalink:"/docs/0.5.0/",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",next:{title:"@consumes basics",permalink:"/docs/0.5.0/guides/Guide_11_Consumes_Basics"}},p={},l=[{value:"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50",id:"-stay-in-touch-",level:4},{value:"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d",id:"-we-were-busy-lately-",level:4},{value:"Install",id:"install",level:2},{value:"Tutorial",id:"tutorial",level:2},{value:"Writing server code",id:"writing-server-code",level:2},{value:"Preparing the demo 
model",id:"preparing-the-demo-model",level:3},{value:"Messages",id:"messages",level:3},{value:"Application",id:"application",level:3},{value:"Function decorators",id:"function-decorators",level:3},{value:"Testing the service",id:"testing-the-service",level:2},{value:"Recap",id:"recap",level:3},{value:"Running the service",id:"running-the-service",level:2},{value:"Documentation",id:"documentation",level:2},{value:"License",id:"license",level:2}],c={toc:l},k="wrapper";function f(e){let{components:a,...t}=e;return(0,o.kt)(k,(0,n.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"fastkafka"},"FastKafka"),(0,o.kt)("b",null,"Effortless Kafka integration for your web services"),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/v/fastkafka.png",alt:"PyPI"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/dm/fastkafka.png",alt:"PyPI -\nDownloads"})," ",(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/pypi/pyversions/fastkafka.png",alt:"PyPI - Python\nVersion"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/actions/workflow/status/airtai/fastkafka/test.yaml",alt:"GitHub Workflow\nStatus"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/codeql.yml/badge.svg",alt:"CodeQL"}),"\n",(0,o.kt)("img",{parentName:"p",src:"https://github.com/airtai/fastkafka//actions/workflows/dependency-review.yml/badge.svg",alt:"Dependency\nReview"})),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://img.shields.io/github/license/airtai/fastkafka.png",alt:"GitHub"})),(0,o.kt)("hr",null),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"https://fastkafka.airt.ai/"},"FastKafka")," is a powerful and easy-to-use\nPython library for building asynchronous services that interact with\nKafka topics. 
Built on top of ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic"),",\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/aio-libs/aiokafka"},"AIOKafka")," and\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/"},"AsyncAPI"),", FastKafka simplifies the process\nof writing producers and consumers for Kafka topics, handling all the\nparsing, networking, task scheduling and data generation automatically.\nWith FastKafka, you can quickly prototype and develop high-performance\nKafka-based services with minimal code, making it an ideal choice for\ndevelopers looking to streamline their workflow and accelerate their\nprojects."),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-stay-in-touch-"},"\u2b50\u2b50\u2b50 Stay in touch \u2b50\u2b50\u2b50"),(0,o.kt)("p",null,"Please show your support and stay in touch by:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"giving our ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/airtai/fastkafka/"},"GitHub repository")," a\nstar, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"joining our ",(0,o.kt)("a",{parentName:"p",href:"https://discord.gg/CJWmYpyFbc"},"Discord server"),"."))),(0,o.kt)("p",null,"Your support helps us to stay in touch with you and encourages us to\ncontinue developing and improving the library. 
Thank you for your\nsupport!"),(0,o.kt)("hr",null),(0,o.kt)("h4",{id:"-we-were-busy-lately-"},"\ud83d\udc1d\ud83d\udc1d\ud83d\udc1d We were busy lately \ud83d\udc1d\ud83d\udc1d\ud83d\udc1d"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://repobeats.axiom.co/api/embed/21f36049093d5eb8e5fdad18c3c5d8df5428ca30.svg",alt:"Activity",title:"Repobeats analytics image"})),(0,o.kt)("h2",{id:"install"},"Install"),(0,o.kt)("p",null,"FastKafka works on macOS, Linux, and most Unix-style operating systems.\nYou can install base version of ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka")," with ",(0,o.kt)("inlineCode",{parentName:"p"},"pip")," as usual:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka\n")),(0,o.kt)("p",null,"To install fastkafka with testing features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test]\n")),(0,o.kt)("p",null,"To install fastkafka with asyncapi docs please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[docs]\n")),(0,o.kt)("p",null,"To install fastkafka with all the features please use:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install fastkafka[test,docs]\n")),(0,o.kt)("h2",{id:"tutorial"},"Tutorial"),(0,o.kt)("p",null,"You can start an interactive tutorial in Google Colab by clicking the\nbutton below:"),(0,o.kt)("a",{href:"https://colab.research.google.com/github/airtai/fastkafka/blob/main/nbs/guides/Guide_00_FastKafka_Demo.ipynb",target:"_blank"},(0,o.kt)("img",{src:"https://colab.research.google.com/assets/colab-badge.svg",alt:"Open In Colab"})),(0,o.kt)("h2",{id:"writing-server-code"},"Writing server code"),(0,o.kt)("p",null,"Here is an example python script using FastKafka that takes data from a\nKafka topic, makes a prediction using a predictive model, and outputs\nthe prediction to another Kafka 
topic."),(0,o.kt)("h3",{id:"preparing-the-demo-model"},"Preparing the demo model"),(0,o.kt)("p",null,"First we will prepare our model using the Iris dataset so that we can\ndemonstrate the predictions using FastKafka. The following call\ndownloads the dataset and trains the model."),(0,o.kt)("p",null,"We will wrap the model creation into a lifespan of our app so that the\nmodel is created just before the app is started."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n')),(0,o.kt)("h3",{id:"messages"},"Messages"),(0,o.kt)("p",null,"FastKafka uses ",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"Pydantic")," to parse input\nJSON-encoded data into Python objects, making it easy to work with\nstructured data in your Kafka-based applications. Pydantic\u2019s\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/usage/models/"},(0,o.kt)("inlineCode",{parentName:"a"},"BaseModel"))," class allows you\nto define messages using a declarative syntax, making it easy to specify\nthe fields and types of your messages."),(0,o.kt)("p",null,"This example defines two message classes for use in a FastKafka\napplication:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," class is used to represent input data for a\npredictive model. 
It has four fields of type\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/latest/api/types/#pydantic.types.NonNegativeFloat"},(0,o.kt)("inlineCode",{parentName:"a"},"NonNegativeFloat")),",\nwhich is a subclass of float that only allows non-negative floating\npoint values.")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," class is used to represent the output of the\npredictive model. It has a single field ",(0,o.kt)("inlineCode",{parentName:"p"},"species")," of type string\nrepresenting the predicted species."))),(0,o.kt)("p",null,"These message classes will be used to parse and validate incoming data\nin Kafka consumers and producers."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from pydantic import BaseModel, Field, NonNegativeFloat\n\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n')),(0,o.kt)("h3",{id:"application"},"Application"),(0,o.kt)("p",null,"This example shows how to initialize a FastKafka application."),(0,o.kt)("p",null,"It starts by defining a dictionary called ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers"),", which\ncontains two entries: ",(0,o.kt)("inlineCode",{parentName:"p"},'"localhost"')," and ",(0,o.kt)("inlineCode",{parentName:"p"},'"production"'),", specifying local\ndevelopment and production Kafka brokers. Each entry specifies the URL,\nport, and other details of a Kafka broker. 
This dictionary is used for\nboth generating the documentation and later to run the actual server\nagainst one of the given kafka broker."),(0,o.kt)("p",null,"Next, an object of the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,o.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nclass is initialized with the minimum set of arguments:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"kafka_brokers"),": a dictionary used for generation of documentation")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n')),(0,o.kt)("h3",{id:"function-decorators"},"Function decorators"),(0,o.kt)("p",null,"FastKafka provides convenient function decorators ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes"),"\nand ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," to allow you to delegate the actual process of"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"consuming and producing data to Kafka, and")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"decoding and encoding JSON encode messages"))),(0,o.kt)("p",null,"from user defined functions to the framework. 
The FastKafka framework\ndelegates these jobs to AIOKafka and Pydantic libraries."),(0,o.kt)("p",null,"These decorators make it easy to specify the processing logic for your\nKafka consumers and producers, allowing you to focus on the core\nbusiness logic of your application without worrying about the underlying\nKafka integration."),(0,o.kt)("p",null,"This following example shows how to use the ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," and\n",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.produces")," decorators in a FastKafka application:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@kafka_app.consumes")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),'\nfunction, which specifies that this function should be called whenever\na message is received on the \u201cinput_data" Kafka topic. The\n',(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data")," function takes a single argument which is expected to\nbe an instance of the ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData")," message class. Specifying the\ntype of the single argument is instructing the Pydantic to use\n",(0,o.kt)("inlineCode",{parentName:"p"},"IrisInputData.parse_raw()")," on the consumed message before passing it\nto the user defined function ",(0,o.kt)("inlineCode",{parentName:"p"},"on_input_data"),".")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"@produces")," decorator is applied to the ",(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),' function,\nwhich specifies that this function should produce a message to the\n\u201cpredictions" Kafka topic whenever it is called. 
The ',(0,o.kt)("inlineCode",{parentName:"p"},"to_predictions"),"\nfunction takes a single integer argument ",(0,o.kt)("inlineCode",{parentName:"p"},"species_class")," representing\none of three possible strign values predicted by the mdoel. It creates\na new ",(0,o.kt)("inlineCode",{parentName:"p"},"IrisPrediction")," message using this value and then returns it.\nThe framework will call the ",(0,o.kt)("inlineCode",{parentName:"p"},'IrisPrediction.json().encode("utf-8")'),"\nfunction on the returned value and produce it to the specified topic."))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("h2",{id:"testing-the-service"},"Testing the service"),(0,o.kt)("p",null,"The service can be tested using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/Tester/#fastkafka.testing.Tester"},(0,o.kt)("inlineCode",{parentName:"a"},"Tester")),"\ninstances which internally starts InMemory implementation of Kafka\nbroker."),(0,o.kt)("p",null,"The Tester will redirect your consumes and produces decorated functions\nto the InMemory Kafka broker so that you can quickly test your app\nwithout the need for a running Kafka broker and all its dependencies."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka.testing import Tester\n\nmsg = IrisInputData(\n sepal_length=0.1,\n sepal_width=0.2,\n petal_length=0.3,\n petal_width=0.4,\n)\n\n# 
Start Tester app and create InMemory Kafka broker for testing\nasync with Tester(kafka_app) as tester:\n # Send IrisInputData message to input_data topic\n await tester.to_input_data(msg)\n\n # Assert that the kafka_app responded with IrisPrediction in predictions topic\n await tester.awaited_mocks.on_predictions.assert_awaited_with(\n IrisPrediction(species="setosa"), timeout=2\n )\n')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called\n[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker 
stopping\n")),(0,o.kt)("h3",{id:"recap"},"Recap"),(0,o.kt)("p",null,"We have created a Iris classification model and encapulated it into our\nfastkafka application. The app will consume the IrisInputData from the\n",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic and produce the predictions to ",(0,o.kt)("inlineCode",{parentName:"p"},"predictions")," topic."),(0,o.kt)("p",null,"To test the app we have:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Created the app")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Started our Tester class which mirrors the developed app topics for\ntesting purposes")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Sent IrisInputData message to ",(0,o.kt)("inlineCode",{parentName:"p"},"input_data")," topic")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Asserted and checked that the developed iris classification service\nhas reacted to IrisInputData message"))),(0,o.kt)("h2",{id:"running-the-service"},"Running the service"),(0,o.kt)("p",null,"The service can be started using builtin faskafka run CLI command.\nBefore we can do that, we will concatenate the code snippets from above\nand save them in a file ",(0,o.kt)("inlineCode",{parentName:"p"},'"application.py"')),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, 
Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n \nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\ndef to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,o.kt)("p",null,"To run the service, you will need a running Kafka broker on localhost as\nspecified in the ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")," parameter above. 
We can start the Kafka\nbroker locally using the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),"."),(0,o.kt)("p",null,"To use\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),",\nyou need to install JRE and Kafka to your environment. To simplify this\nprocess, fastkafka comes with a CLI command that does just that, to run\nit, in your terminal execute the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka testing install_deps\n")),(0,o.kt)("p",null,"Now we can run\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/#fastkafka.testing.ApacheKafkaBroker"},(0,o.kt)("inlineCode",{parentName:"a"},"ApacheKafkaBroker")),"\nthat will start a Kafka broker instance for us."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka.testing import ApacheKafkaBroker\n\nbroker = ApacheKafkaBroker(apply_nest_asyncio=True)\n\nbroker.start()\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!\n[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()\n[INFO] fastkafka._components.test_dependencies: Java is already installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] fastkafka._components.test_dependencies: Kafka is installed.\n[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...\n[INFO] 
fastkafka._testing.apache_kafka_broker: Starting zookeeper...\n[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...\n[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.\n\n'127.0.0.1:9092'\n")),(0,o.kt)("p",null,"Then, we start the FastKafka service by running the following command in\nthe folder where the ",(0,o.kt)("inlineCode",{parentName:"p"},"application.py")," file is located:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app\n")),(0,o.kt)("p",null,"In the above command, we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--num-workers")," option to specify how many\nworkers to launch and we use ",(0,o.kt)("inlineCode",{parentName:"p"},"--kafka-broker")," option to specify which\nkafka broker configuration to use from earlier specified ",(0,o.kt)("inlineCode",{parentName:"p"},"kafka_brokers")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[801767]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[801765]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[801767]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[801765]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[801765]: [INFO] 
fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[801767]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[801767]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[801765]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[801765]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[801765]: [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata\n[801765]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. \n[801767]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization\n[801767]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}. 
\n[801767]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[801765]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)\n[801767]: [ERROR] aiokafka: Unable to update metadata from [0]\n[801765]: [ERROR] aiokafka: Unable to update metadata from [0]\n^C\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801765...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801767...\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n")),(0,o.kt)("p",null,"You need to interupt running of the cell above by selecting\n",(0,o.kt)("inlineCode",{parentName:"p"},"Runtime->Interupt execution")," on the toolbar above."),(0,o.kt)("p",null,"Finally, we can stop the local Kafka Broker:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"broker.stop()\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 801303...\n[INFO] fastkafka._components._subprocess: 
terminate_asyncio_process(): Process 801303 was already terminated.\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 800930...\n[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 800930 was already terminated.\n[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.\n")),(0,o.kt)("h2",{id:"documentation"},"Documentation"),(0,o.kt)("p",null,"The kafka app comes with builtin documentation generation using\n",(0,o.kt)("a",{parentName:"p",href:"https://www.asyncapi.com/tools/generator"},"AsyncApi HTML generator"),"."),(0,o.kt)("p",null,"AsyncApi requires Node.js to be installed and we provide the following\nconvenience command line for it:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs install_deps\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed\n")),(0,o.kt)("p",null,"To generate the documentation programatically you just need to call the\nfolloving command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs generate application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! \u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.\n")),(0,o.kt)("p",null,". This will generate the ",(0,o.kt)("em",{parentName:"p"},"asyncapi")," folder in relative path where all\nyour documentation will be saved. 
You can check out the content of it\nwith:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"ls -l asyncapi\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"total 8\ndrwxrwxr-x 4 kumaran kumaran 4096 Mar 21 09:14 docs\ndrwxrwxr-x 2 kumaran kumaran 4096 Mar 21 09:14 spec\n")),(0,o.kt)("p",null,"In docs folder you will find the servable static html file of your\ndocumentation. This can also be served using our ",(0,o.kt)("inlineCode",{parentName:"p"},"fastkafka docs serve"),"\nCLI command (more on that in our guides)."),(0,o.kt)("p",null,"In spec folder you will find a asyncapi.yml file containing the async\nAPI specification of your application."),(0,o.kt)("p",null,"We can locally preview the generated documentation by running the\nfollowing command:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fastkafka docs serve application:kafka_app\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'\n[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'\n[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'\n\nDone! 
\u2728\nCheck out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.\n\n\nServing documentation on http://127.0.0.1:8000\n^C\nInterupting serving of documentation and cleaning up...\n")),(0,o.kt)("p",null,"From the parameters passed to the application constructor, we get the\ndocumentation bellow:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n)\n')),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-servers.png",alt:"Kafka_servers"})),(0,o.kt)("p",null,"The following documentation snippet are for the consumer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-consumer.png",alt:"Kafka_consumer"})),(0,o.kt)("p",null,"The following documentation snippet are for the producer as specified in\nthe code above:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-producer.png",alt:"Kafka_producer"})),(0,o.kt)("p",null,"Finally, all messages as defined as subclasses of ",(0,o.kt)("em",{parentName:"p"},"BaseModel")," are\ndocumented as 
well:"),(0,o.kt)("p",null,(0,o.kt)("img",{parentName:"p",src:"https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png",alt:"Kafka_![Kafka_servers](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)"})),(0,o.kt)("h2",{id:"license"},"License"),(0,o.kt)("p",null,"FastKafka is licensed under the Apache License 2.0"),(0,o.kt)("p",null,"A permissive license whose main conditions require preservation of\ncopyright and license notices. Contributors provide an express grant of\npatent rights. Licensed works, modifications, and larger works may be\ndistributed under different terms and without source code."),(0,o.kt)("p",null,"The full text of the license can be found\n",(0,o.kt)("a",{parentName:"p",href:"https://raw.githubusercontent.com/airtai/fastkafka/main/LICENSE"},"here"),"."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/f2aaa4e5.df8937bd.js b/assets/js/f2aaa4e5.df8937bd.js new file mode 100644 index 0000000..db0b9ea --- /dev/null +++ b/assets/js/f2aaa4e5.df8937bd.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8861],{3905:(e,t,a)=>{a.d(t,{Zo:()=>s,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var 
a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),f=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},s=function(e){var t=f(e.components);return n.createElement(p.Provider,{value:t},e.children)},k="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,s=i(e,["components","mdxType","originalType","parentName"]),k=f(a),u=r,d=k["".concat(p,".").concat(u)]||k[u]||c[u]||o;return a?n.createElement(d,l(l({ref:t},s),{},{components:a})):n.createElement(d,l({ref:t},s))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[k]="string"==typeof e?e:r,l[1]=i;for(var f=2;f<o;f++)l[f]=a[f];return n.createElement.apply(null,l)}return n.createElement.apply(null,a)}u.displayName="MDXCreateElement"},3123:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>c,frontMatter:()=>o,metadata:()=>i,toc:()=>f});var n=a(7462),r=(a(7294),a(3905));const o={},l=void 0,i={unversionedId:"api/fastkafka/KafkaEvent",id:"version-0.8.0/api/fastkafka/KafkaEvent",title:"KafkaEvent",description:"fastkafka.KafkaEvent 
{fastkafka.KafkaEvent}",source:"@site/versioned_docs/version-0.8.0/api/fastkafka/KafkaEvent.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/KafkaEvent",permalink:"/docs/api/fastkafka/KafkaEvent",draft:!1,tags:[],version:"0.8.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/api/fastkafka/"},next:{title:"AvroBase",permalink:"/docs/api/fastkafka/encoder/AvroBase"}},p={},f=[{value:"fastkafka.KafkaEvent",id:"fastkafka.KafkaEvent",level:2}],s={toc:f},k="wrapper";function c(e){let{components:t,...a}=e;return(0,r.kt)(k,(0,n.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.KafkaEvent"},"fastkafka.KafkaEvent"),(0,r.kt)("a",{href:"https://github.com/airtai/fastkafka/blob/0.8.0/fastkafka/_components/producer_decorator.py#L36-L46",class:"link-to-source",target:"_blank"},"View source"),(0,r.kt)("p",null,"A generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Type"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"),(0,r.kt)("th",{parentName:"tr",align:null},"Default"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"message")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"BaseSubmodel")),(0,r.kt)("td",{parentName:"tr",align:null},"The message contained in the Kafka event, can be of type 
pydantic.BaseModel."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("em",{parentName:"td"},"required"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"key")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"Optional[bytes]")),(0,r.kt)("td",{parentName:"tr",align:null},"The optional key used to identify the Kafka event."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("inlineCode",{parentName:"td"},"None"))))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/f35e2aba.396a2a4d.js b/assets/js/f35e2aba.396a2a4d.js new file mode 100644 index 0000000..b02f875 --- /dev/null +++ b/assets/js/f35e2aba.396a2a4d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3690],{3905:(e,r,t)=>{t.d(r,{Zo:()=>s,kt:()=>k});var n=t(7294);function a(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function o(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);r&&(n=n.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,n)}return t}function c(e){for(var r=1;r<arguments.length;r++){var t=null!=arguments[r]?arguments[r]:{};r%2?o(Object(t),!0).forEach((function(r){a(e,r,t[r])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):o(Object(t)).forEach((function(r){Object.defineProperty(e,r,Object.getOwnPropertyDescriptor(t,r))}))}return e}function i(e,r){if(null==e)return{};var t,n,a=function(e,r){if(null==e)return{};var t,n,a={},o=Object.keys(e);for(n=0;n<o.length;n++)t=o[n],r.indexOf(t)>=0||(a[t]=e[t]);return a}(e,r);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)t=o[n],r.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var 
d=n.createContext({}),l=function(e){var r=n.useContext(d),t=r;return e&&(t="function"==typeof e?e(r):c(c({},r),e)),t},s=function(e){var r=l(e.components);return n.createElement(d.Provider,{value:r},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var r=e.children;return n.createElement(n.Fragment,{},r)}},u=n.forwardRef((function(e,r){var t=e.components,a=e.mdxType,o=e.originalType,d=e.parentName,s=i(e,["components","mdxType","originalType","parentName"]),p=l(t),u=a,k=p["".concat(d,".").concat(u)]||p[u]||f[u]||o;return t?n.createElement(k,c(c({ref:r},s),{},{components:t})):n.createElement(k,c({ref:r},s))}));function k(e,r){var t=arguments,a=r&&r.mdxType;if("string"==typeof e||a){var o=t.length,c=new Array(o);c[0]=u;var i={};for(var d in r)hasOwnProperty.call(r,d)&&(i[d]=r[d]);i.originalType=e,i[p]="string"==typeof e?e:a,c[1]=i;for(var l=2;l<o;l++)c[l]=t[l];return n.createElement.apply(null,c)}return n.createElement.apply(null,t)}u.displayName="MDXCreateElement"},4128:(e,r,t)=>{t.r(r),t.d(r,{assets:()=>d,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>i,toc:()=>l});var n=t(7462),a=(t(7294),t(3905));const o={},c=void 0,i={unversionedId:"api/fastkafka/encoder/avro_encoder",id:"version-0.7.1/api/fastkafka/encoder/avro_encoder",title:"avro_encoder",description:"fastkafka.encoder.avroencoder 
{fastkafka.encoder.avroencoder}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/avro_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/avro_encoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/avro_encoder",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avro_decoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/avro_decoder"},next:{title:"avsc_to_pydantic",permalink:"/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic"}},d={},l=[{value:"<code>fastkafka.encoder.avro_encoder</code>",id:"fastkafka.encoder.avro_encoder",level:2},{value:"<code>avro_encoder</code>",id:"avro_encoder",level:3}],s={toc:l},p="wrapper";function f(e){let{components:r,...t}=e;return(0,a.kt)(p,(0,n.Z)({},s,t,{components:r,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.encoder.avro_encoder"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.avro_encoder")),(0,a.kt)("h3",{id:"avro_encoder"},(0,a.kt)("inlineCode",{parentName:"h3"},"avro_encoder")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def avro_encoder(msg: pydantic.main.BaseModel) -> bytes")),(0,a.kt)("p",null,"Encoder to encode pydantic instances to avro message"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"msg"),": An instance of pydantic basemodel")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"A bytes message which is encoded from pydantic basemodel")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/f39642a1.dcbef318.js b/assets/js/f39642a1.dcbef318.js new file mode 100644 index 0000000..58d82bc --- /dev/null +++ b/assets/js/f39642a1.dcbef318.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9107],{3905:(e,t,n)=>{n.d(t,{Zo:()=>l,kt:()=>k});var 
r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function c(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){a(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,r,a=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r<o.length;r++)n=o[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),d=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):c(c({},t),e)),n},l=function(e){var t=d(e.components);return r.createElement(i.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,l=s(e,["components","mdxType","originalType","parentName"]),p=d(n),u=a,k=p["".concat(i,".").concat(u)]||p[u]||f[u]||o;return n?r.createElement(k,c(c({ref:t},l),{},{components:n})):r.createElement(k,c({ref:t},l))}));function k(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,c=new Array(o);c[0]=u;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[p]="string"==typeof e?e:a,c[1]=s;for(var d=2;d<o;d++)c[d]=n[d];return 
r.createElement.apply(null,c)}return r.createElement.apply(null,n)}u.displayName="MDXCreateElement"},795:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>c,default:()=>f,frontMatter:()=>o,metadata:()=>s,toc:()=>d});var r=n(7462),a=(n(7294),n(3905));const o={},c=void 0,s={unversionedId:"api/fastkafka/encoder/json_decoder",id:"version-0.6.0/api/fastkafka/encoder/json_decoder",title:"json_decoder",description:"fastkafka.encoder.jsondecoder {fastkafka.encoder.jsondecoder}",source:"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/json_decoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_decoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/json_decoder",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"avsc_to_pydantic",permalink:"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic"},next:{title:"json_encoder",permalink:"/docs/0.6.0/api/fastkafka/encoder/json_encoder"}},i={},d=[{value:"<code>fastkafka.encoder.json_decoder</code>",id:"fastkafka.encoder.json_decoder",level:2},{value:"<code>json_decoder</code>",id:"json_decoder",level:3}],l={toc:d},p="wrapper";function f(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},l,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"fastkafka.encoder.json_decoder"},(0,a.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.json_decoder")),(0,a.kt)("h3",{id:"json_decoder"},(0,a.kt)("inlineCode",{parentName:"h3"},"json_decoder")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"def json_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any")),(0,a.kt)("p",null,"Decoder to decode json string in bytes to pydantic model instance"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"raw_msg"),": Bytes message received from Kafka 
topic"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"cls"),": Pydantic class; This pydantic class will be used to construct instance of same class")),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Returns"),":"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"An instance of given pydantic class")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/f7e229b3.1f12b6b5.js b/assets/js/f7e229b3.1f12b6b5.js new file mode 100644 index 0000000..7032f09 --- /dev/null +++ b/assets/js/f7e229b3.1f12b6b5.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3331],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>k});var o=t(7294);function n(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function s(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);a&&(o=o.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,o)}return t}function r(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?s(Object(t),!0).forEach((function(a){n(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):s(Object(t)).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}function l(e,a){if(null==e)return{};var t,o,n=function(e,a){if(null==e)return{};var t,o,n={},s=Object.keys(e);for(o=0;o<s.length;o++)t=s[o],a.indexOf(t)>=0||(n[t]=e[t]);return n}(e,a);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(o=0;o<s.length;o++)t=s[o],a.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var i=o.createContext({}),p=function(e){var a=o.useContext(i),t=a;return e&&(t="function"==typeof e?e(a):r(r({},a),e)),t},c=function(e){var a=p(e.components);return 
o.createElement(i.Provider,{value:a},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var a=e.children;return o.createElement(o.Fragment,{},a)}},d=o.forwardRef((function(e,a){var t=e.components,n=e.mdxType,s=e.originalType,i=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=p(t),d=n,k=u["".concat(i,".").concat(d)]||u[d]||m[d]||s;return t?o.createElement(k,r(r({ref:a},c),{},{components:t})):o.createElement(k,r({ref:a},c))}));function k(e,a){var t=arguments,n=a&&a.mdxType;if("string"==typeof e||n){var s=t.length,r=new Array(s);r[0]=d;var l={};for(var i in a)hasOwnProperty.call(a,i)&&(l[i]=a[i]);l.originalType=e,l[u]="string"==typeof e?e:n,r[1]=l;for(var p=2;p<s;p++)r[p]=t[p];return o.createElement.apply(null,r)}return o.createElement.apply(null,t)}d.displayName="MDXCreateElement"},4494:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>r,default:()=>m,frontMatter:()=>s,metadata:()=>l,toc:()=>p});var o=t(7462),n=(t(7294),t(3905));const s={},r="@consumes basics",l={unversionedId:"guides/Guide_11_Consumes_Basics",id:"version-0.6.0/guides/Guide_11_Consumes_Basics",title:"@consumes basics",description:"You can use @consumes decorator to consume messages from Kafka topics.",source:"@site/versioned_docs/version-0.6.0/guides/Guide_11_Consumes_Basics.md",sourceDirName:"guides",slug:"/guides/Guide_11_Consumes_Basics",permalink:"/docs/0.6.0/guides/Guide_11_Consumes_Basics",draft:!1,tags:[],version:"0.6.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"FastKafka",permalink:"/docs/0.6.0/"},next:{title:"@produces basics",permalink:"/docs/0.6.0/guides/Guide_21_Produces_Basics"}},i={},p=[{value:"Import <code>FastKafka</code>",id:"import-fastkafka",level:2},{value:"Define the structure of the messages",id:"define-the-structure-of-the-messages",level:2},{value:"Create a base FastKafka app",id:"create-a-base-fastkafka-app",level:2},{value:"Create a consumer function and decorate it with 
<code>@consumes</code>",id:"create-a-consumer-function-and-decorate-it-with-consumes",level:2},{value:"Final app",id:"final-app",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Send the message to kafka topic",id:"send-the-message-to-kafka-topic",level:2},{value:"Choosing a topic",id:"choosing-a-topic",level:2},{value:"Message data",id:"message-data",level:2},{value:"Message metadata",id:"message-metadata",level:2},{value:"Create a consumer function with metadata",id:"create-a-consumer-function-with-metadata",level:3},{value:"Dealing with high latency consuming functions",id:"dealing-with-high-latency-consuming-functions",level:2}],c={toc:p},u="wrapper";function m(e){let{components:a,...t}=e;return(0,n.kt)(u,(0,o.Z)({},c,t,{components:a,mdxType:"MDXLayout"}),(0,n.kt)("h1",{id:"consumes-basics"},"@consumes basics"),(0,n.kt)("p",null,"You can use ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator to consume messages from Kafka topics."),(0,n.kt)("p",null,"In this guide we will create a simple FastKafka app that will consume\n",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages from hello_world topic."),(0,n.kt)("h2",{id:"import-fastkafka"},"Import ",(0,n.kt)("a",{parentName:"h2",href:"/docs/0.6.0/api/fastkafka//#fastkafka.FastKafka"},(0,n.kt)("inlineCode",{parentName:"a"},"FastKafka"))),(0,n.kt)("p",null,"To use the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator, first we need to import the base\nFastKafka app to create our application."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka import FastKafka\n")),(0,n.kt)("p",null,"In this demo we will log the messages to the output so that we can\ninspect and verify that our app is consuming properly. 
For that we need\nto import the logger."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n")),(0,n.kt)("h2",{id:"define-the-structure-of-the-messages"},"Define the structure of the messages"),(0,n.kt)("p",null,"Next, you need to define the structure of the messages you want to\nconsume from the topic using ",(0,n.kt)("a",{parentName:"p",href:"https://docs.pydantic.dev/"},"pydantic"),". For\nthe guide we\u2019ll stick to something basic, but you are free to define any\ncomplex message structure you wish in your project, just make sure it\ncan be JSON encoded."),(0,n.kt)("p",null,"Let\u2019s import ",(0,n.kt)("inlineCode",{parentName:"p"},"BaseModel")," and ",(0,n.kt)("inlineCode",{parentName:"p"},"Field")," from pydantic and create a simple\n",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class containing one string parameter ",(0,n.kt)("inlineCode",{parentName:"p"},"msg")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},"from pydantic import BaseModel, Field\n")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n')),(0,n.kt)("h2",{id:"create-a-base-fastkafka-app"},"Create a base FastKafka app"),(0,n.kt)("p",null,"Now we will create and define a base FastKafka app, replace the\n",(0,n.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,n.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values of your\nKafka bootstrap server"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": 
"<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n')),(0,n.kt)("h2",{id:"create-a-consumer-function-and-decorate-it-with-consumes"},"Create a consumer function and decorate it with ",(0,n.kt)("inlineCode",{parentName:"h2"},"@consumes")),(0,n.kt)("p",null,"Let\u2019s create a consumer function that will consume ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," messages\nfrom ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and log them."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"The function decorated with the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will be called\nwhen a message is produced to Kafka."),(0,n.kt)("p",null,"The message will then be injected into the typed ",(0,n.kt)("em",{parentName:"p"},"msg")," argument of the\nfunction and its type will be used to parse the message."),(0,n.kt)("p",null,"In this example case, when the message is sent into a ",(0,n.kt)("em",{parentName:"p"},"hello_world"),"\ntopic, it will be parsed into a HelloWorld class and ",(0,n.kt)("inlineCode",{parentName:"p"},"on_hello_world"),"\nfunction will be called with the parsed class as ",(0,n.kt)("em",{parentName:"p"},"msg")," argument value."),(0,n.kt)("h2",{id:"final-app"},"Final app"),(0,n.kt)("p",null,"Your app code should look like this:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'from fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\nfrom fastkafka._components.logger import get_logger\n\nlogger = get_logger(__name__)\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n 
"port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("h2",{id:"run-the-app"},"Run the app"),(0,n.kt)("p",null,"Now we can run the app. Copy the code above in consumer_example.py and\nrun it by running"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app\n")),(0,n.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[513863]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[513863]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})\n[513863]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[513863]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. 
\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 513863...\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 513863 terminated.\n")),(0,n.kt)("h2",{id:"send-the-message-to-kafka-topic"},"Send the message to kafka topic"),(0,n.kt)("p",null,"Lets send a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},'echo {\\"msg\\": \\"Hello world\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'print(consumer_task.value[1].decode("UTF-8"))\n')),(0,n.kt)("p",null,"You should see the \u201cGot msg: msg='Hello world'\" being logged by your\nconsumer."),(0,n.kt)("h2",{id:"choosing-a-topic"},"Choosing a topic"),(0,n.kt)("p",null,"You probably noticed that you didn\u2019t define which topic you are\nreceiving the message from, this is because the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator\ndetermines the topic by default from your function name. The decorator\nwill take your function name and strip the default \u201con","_",'" prefix from it\nand use the rest as the topic name. 
In this example case, the topic is\n',(0,n.kt)("em",{parentName:"p"},"hello_world"),"."),(0,n.kt)("p",null,"You can choose your custom prefix by defining the ",(0,n.kt)("inlineCode",{parentName:"p"},"prefix")," parameter in\nconsumes decorator, like this:"),(0,n.kt)("p",null,"Also, you can define the topic name completely by defining the ",(0,n.kt)("inlineCode",{parentName:"p"},"topic"),"\nin parameter in consumes decorator, like this:"),(0,n.kt)("h2",{id:"message-data"},"Message data"),(0,n.kt)("p",null,"The message received from kafka is translated from binary JSON\nrepresentation int the class defined by typing of ",(0,n.kt)("em",{parentName:"p"},"msg")," parameter in the\nfunction decorated by the ",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator."),(0,n.kt)("p",null,"In this example case, the message will be parsed into a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld"),"\nclass."),(0,n.kt)("h2",{id:"message-metadata"},"Message metadata"),(0,n.kt)("p",null,"If you need any of Kafka message metadata such as timestamp, partition\nor headers you can access the metadata by adding a EventMetadata typed\nargument to your consumes function and the metadata from the incoming\nmessage will be automatically injected when calling the consumes\nfunction."),(0,n.kt)("p",null,"Let\u2019s demonstrate that."),(0,n.kt)("h3",{id:"create-a-consumer-function-with-metadata"},"Create a consumer function with metadata"),(0,n.kt)("p",null,"The only difference from the original basic consume function is that we\nare now passing the ",(0,n.kt)("inlineCode",{parentName:"p"},"meta: EventMetadata")," argument to the function. The\n",(0,n.kt)("inlineCode",{parentName:"p"},"@consumes")," decorator will register that and, when a message is\nconsumed, it will also pass the metadata to your function. Now you can\nuse the metadata in your consume function. 
Lets log it to see what it\ncontains."),(0,n.kt)("p",null,"First, we need to import the EventMetadata"),(0,n.kt)("p",null,"Now we can add the ",(0,n.kt)("inlineCode",{parentName:"p"},"meta")," argument to our consuming function."),(0,n.kt)("p",null,"Your final app should look like this:"),(0,n.kt)("p",null,"Now lets run the app and send a message to the broker to see the logged\nmessage metadata."),(0,n.kt)("p",null,"You should see a similar log as the one below and the metadata being\nlogged in your app."),(0,n.kt)("p",null,"As you can see in the log, from the metadata you now have the\ninformation about the partition, offset, timestamp, key and headers.\n\ud83c\udf89"),(0,n.kt)("h2",{id:"dealing-with-high-latency-consuming-functions"},"Dealing with high latency consuming functions"),(0,n.kt)("p",null,"If your functions have high latency due to, for example, lengthy\ndatabase calls you will notice a big decrease in performance. This is\ndue to the issue of how the consumes decorator executes your consume\nfunctions when consumeing events. By default, the consume function will\nrun the consuming funtions for one topic sequentially, this is the most\nstraightforward approach and results with the least amount of overhead."),(0,n.kt)("p",null,"But, to handle those high latency tasks and run them in parallel,\nFastKafka has a\n",(0,n.kt)("a",{parentName:"p",href:"/docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor/#fastkafka.executors.DynamicTaskExecutor"},(0,n.kt)("inlineCode",{parentName:"a"},"DynamicTaskExecutor")),"\nprepared for your consumers. 
This executor comes with additional\noverhead, so use it only when you need to handle high latency functions."),(0,n.kt)("p",null,"Lets demonstrate how to use it."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'decorate_consumes_executor = """@app.consumes(executor="DynamicTaskExecutor")\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n"""\nmd(f"```python\\n{decorate_consumes}\\n```")\n')),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'@app.consumes()\nasync def on_hello_world(msg: HelloWorld):\n logger.info(f"Got msg: {msg}")\n')),(0,n.kt)("p",null,"Lets send a ",(0,n.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message to the ",(0,n.kt)("em",{parentName:"p"},"hello_world")," topic and check if\nour consumer kafka application has logged the received message. In your\nterminal, run:"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-shell"},'echo {\\"msg\\": \\"Hello world\\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>\n')),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-python"},'print(consumer_task.value[1].decode("UTF-8"))\n')),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre"},"[6814]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:50361'\n[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:50361', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}\n[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[6814]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: 
frozenset({'hello_world'})\n[6814]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}\n[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[6814]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}. \n[6814]: ConsumerRecord(topic='hello_world', partition=0, offset=0, timestamp=1683803949271, timestamp_type=0, key=None, value=b'{\"msg\": \"Hello world\"}', checksum=None, serialized_key_size=-1, serialized_value_size=22, headers=())\n[6814]: [INFO] consumer_example: Got msg: msg='Hello world'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 6814...\n[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.\n[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 6814 terminated.\n")),(0,n.kt)("p",null,"You should see the \u201cGot msg: msg='Hello world'\" being logged by your\nconsumer."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/f8edae29.e9acd7b2.js b/assets/js/f8edae29.e9acd7b2.js new file mode 100644 index 0000000..d0376e4 --- /dev/null +++ b/assets/js/f8edae29.e9acd7b2.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[7710],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function s(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function i(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var l=n.createContext({}),p=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):s(s({},t),e)),a},c=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=p(a),k=r,f=d["".concat(l,".").concat(k)]||d[k]||u[k]||o;return a?n.createElement(f,s(s({ref:t},c),{},{components:a})):n.createElement(f,s({ref:t},c))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,s=new Array(o);s[0]=k;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,s[1]=i;for(var p=2;p<o;p++)s[p]=a[p];return n.createElement.apply(null,s)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},4586:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},s="Batch producing",i={unversionedId:"guides/Guide_23_Batch_Producing",id:"guides/Guide_23_Batch_Producing",title:"Batch producing",description:"If you want to send 
your data in batches @produces decorator makes",source:"@site/docs/guides/Guide_23_Batch_Producing.md",sourceDirName:"guides",slug:"/guides/Guide_23_Batch_Producing",permalink:"/docs/next/guides/Guide_23_Batch_Producing",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Defining a partition key",permalink:"/docs/next/guides/Guide_22_Partition_Keys"},next:{title:"Lifespan Events",permalink:"/docs/next/guides/Guide_05_Lifespan_Handler"}},l={},p=[{value:"Return a batch from the producing function",id:"return-a-batch-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the batch was sent to the Kafka topic with the defined key",id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key",level:2},{value:"Batch key",id:"batch-key",level:2},{value:"Check if the batch was sent to the Kafka topic",id:"check-if-the-batch-was-sent-to-the-kafka-topic",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"batch-producing"},"Batch producing"),(0,r.kt)("p",null,"If you want to send your data in batches ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator makes\nthat possible for you. 
By returning a ",(0,r.kt)("inlineCode",{parentName:"p"},"list")," of messages you want to\nsend in a batch the producer will collect the messages and send them in\na batch to a Kafka broker."),(0,r.kt)("p",null,"This guide will demonstrate how to use this feature."),(0,r.kt)("h2",{id:"return-a-batch-from-the-producing-function"},"Return a batch from the producing function"),(0,r.kt)("p",null,"To define a batch that you want to produce to Kafka topic, you need to\nreturn the ",(0,r.kt)("inlineCode",{parentName:"p"},"List")," of the messages that you want to be batched from your\nproducing function."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n")),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class batch\nthat is created from a list of msgs we passed into our producing\nfunction."),(0,r.kt)("p",null,'Lets also prepare a backgound task that will send a batch of \u201chello\nworld" messages when the app starts.'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n')),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_21_Produces_Basics"},"@producer\nbasics")," guide to return the\n",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," batch. 
The final app will look like this (make sure you\nreplace the ",(0,r.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> List[HelloWorld]:\n return [HelloWorld(msg=msg) for msg in msgs]\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task\n[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish\n[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic-with-the-defined-key"},"Check if the batch was sent to the Kafka topic with the defined key"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the batch of messages in your topic."),(0,r.kt)("h2",{id:"batch-key"},"Batch key"),(0,r.kt)("p",null,"To define a key for your batch like in ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a partition\nkey")," guide you can wrap the\nreturning value in a\n",(0,r.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/KafkaEvent#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass. To learn more about defining a partition ke and\n",(0,r.kt)("a",{parentName:"p",href:"/docs/next/api/fastkafka/KafkaEvent#fastkafka.KafkaEvent"},(0,r.kt)("inlineCode",{parentName:"a"},"KafkaEvent")),"\nclass, please, have a look at ",(0,r.kt)("a",{parentName:"p",href:"/docs/next/guides/Guide_22_Partition_Keys"},"Defining a partition\nkey")," guide."),(0,r.kt)("p",null,"Let\u2019s demonstrate that."),(0,r.kt)("p",null,"To define a key, we just need to modify our producing function, like\nthis:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("p",null,"Now our app looks like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nimport asyncio\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": 
"local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\n@app.run_in_background()\nasync def prepare_and_send_hello_batch():\n msgs=[f"Hello world {i}" for i in range(10)]\n await to_hello_world(msgs)\n\nfrom typing import List\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:\n return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")\n')),(0,r.kt)("h2",{id:"check-if-the-batch-was-sent-to-the-kafka-topic"},"Check if the batch was sent to the Kafka topic"),(0,r.kt)("p",null,'Lets check the topic and see if there are \u201cHello world" messages in the\nhello_world topic, containing a defined key. In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the batch of messages with the defined key in your topic."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/fb969bb3.8d0af030.js b/assets/js/fb969bb3.8d0af030.js new file mode 100644 index 0000000..6ac81df --- /dev/null +++ b/assets/js/fb969bb3.8d0af030.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9571],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>f});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?o(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):o(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function s(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},o=Object.keys(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n<o.length;n++)a=o[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var l=n.createContext({}),p=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),c=p(a),k=r,f=c["".concat(l,".").concat(k)]||c[k]||d[k]||o;return a?n.createElement(f,i(i({ref:t},u),{},{components:a})):n.createElement(f,i({ref:t},u))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:r,i[1]=s;for(var p=2;p<o;p++)i[p]=a[p];return n.createElement.apply(null,i)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},3911:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={},i="Defining a partition key",s={unversionedId:"guides/Guide_22_Partition_Keys",id:"version-0.7.0/guides/Guide_22_Partition_Keys",title:"Defining a partition 
key",description:"Partition keys are used in Apache Kafka to determine which partition a",source:"@site/versioned_docs/version-0.7.0/guides/Guide_22_Partition_Keys.md",sourceDirName:"guides",slug:"/guides/Guide_22_Partition_Keys",permalink:"/docs/0.7.0/guides/Guide_22_Partition_Keys",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"@produces basics",permalink:"/docs/0.7.0/guides/Guide_21_Produces_Basics"},next:{title:"Batch producing",permalink:"/docs/0.7.0/guides/Guide_23_Batch_Producing"}},l={},p=[{value:"Return a key from the producing function",id:"return-a-key-from-the-producing-function",level:2},{value:"App example",id:"app-example",level:2},{value:"Run the app",id:"run-the-app",level:2},{value:"Check if the message was sent to the Kafka topic with the desired key",id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key",level:2}],u={toc:p},c="wrapper";function d(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"defining-a-partition-key"},"Defining a partition key"),(0,r.kt)("p",null,"Partition keys are used in Apache Kafka to determine which partition a\nmessage should be written to. This ensures that related messages are\nkept together in the same partition, which can be useful for ensuring\norder or for grouping related messages together for efficient\nprocessing. 
Additionally, partitioning data across multiple partitions\nallows Kafka to distribute load across multiple brokers and scale\nhorizontally, while replicating data across multiple brokers provides\nfault tolerance."),(0,r.kt)("p",null,"You can define your partition keys when using the ",(0,r.kt)("inlineCode",{parentName:"p"},"@produces")," decorator,\nthis guide will demonstrate to you this feature."),(0,r.kt)("h2",{id:"return-a-key-from-the-producing-function"},"Return a key from the producing function"),(0,r.kt)("p",null,"To define a key for the message that you want to produce to Kafka topic,\nyou need to wrap the response into ",(0,r.kt)("inlineCode",{parentName:"p"},"KafkaEvent")," class and set the key\nvalue. Check the example below:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n')),(0,r.kt)("p",null,"In the example, we want to return the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," message class with\nthe key defined as ",(0,r.kt)("em",{parentName:"p"},"my_key"),". So, we wrap the message and key into a\nKafkaEvent class and return it as such."),(0,r.kt)("p",null,"While generating the documentation, the ",(0,r.kt)("inlineCode",{parentName:"p"},"KafkaEvent")," class will be\nunwrapped and the ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," class will be documented in the\ndefinition of message type, same way if you didn\u2019t use the key."),(0,r.kt)("p",null,'!!! info "Which key to choose?"'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. 
Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.\n")),(0,r.kt)("h2",{id:"app-example"},"App example"),(0,r.kt)("p",null,"We will modify the app example from ",(0,r.kt)("strong",{parentName:"p"},"@producer basics")," guide to return\nthe ",(0,r.kt)("inlineCode",{parentName:"p"},"HelloWorld")," with our key. The final app will look like this (make\nsure you replace the ",(0,r.kt)("inlineCode",{parentName:"p"},"<url_of_your_kafka_bootstrap_server>")," and\n",(0,r.kt)("inlineCode",{parentName:"p"},"<port_of_your_kafka_bootstrap_server>")," with the actual values):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'\nfrom fastkafka import FastKafka\nfrom pydantic import BaseModel, Field\n\nclass HelloWorld(BaseModel):\n msg: str = Field(\n ...,\n example="Hello",\n description="Demo hello world message",\n )\n\nkafka_brokers = {\n "demo_broker": {\n "url": "<url_of_your_kafka_bootstrap_server>",\n "description": "local demo kafka broker",\n "port": "<port_of_your_kafka_bootstrap_server>",\n }\n}\n\napp = FastKafka(kafka_brokers=kafka_brokers)\n\nfrom fastkafka import KafkaEvent\n\n@app.produces()\nasync def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:\n return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")\n\nimport asyncio\n\n@app.run_in_background()\nasync def hello_every_second():\n while(True):\n await to_hello_world(msg="Hello world!")\n await asyncio.sleep(1)\n')),(0,r.kt)("h2",{id:"run-the-app"},"Run the app"),(0,r.kt)("p",null,"Now we can run the app. 
Copy the code above in producer_example.py and\nrun it by running"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app\n")),(0,r.kt)("p",null,"After running the command, you should see this output in your terminal:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task\n[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'\n[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'\n[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'\nStarting process cleanup, this may take a few seconds...\n[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish\n[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'\n[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.\n")),(0,r.kt)("h2",{id:"check-if-the-message-was-sent-to-the-kafka-topic-with-the-desired-key"},"Check if the message was sent to the Kafka topic with the desired key"),(0,r.kt)("p",null,'Lets check the topic and see if there is a \u201cHello world!" message in the\nhello_world topic with the defined key. 
In your terminal run:'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-shell"},"kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>\n")),(0,r.kt)("p",null,"You should see the ",(0,r.kt)("em",{parentName:"p"},'my_key {\u201cmsg": \u201cHello world!"}')," messages in your\ntopic appearing, the ",(0,r.kt)("em",{parentName:"p"},"my_key")," part of the message is the key that we\ndefined in our producing function."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/fc8a86b2.1a0d2a4d.js b/assets/js/fc8a86b2.1a0d2a4d.js new file mode 100644 index 0000000..22c1d27 --- /dev/null +++ b/assets/js/fc8a86b2.1a0d2a4d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[8197],{3905:(a,e,n)=>{n.d(e,{Zo:()=>k,kt:()=>m});var t=n(7294);function i(a,e,n){return e in a?Object.defineProperty(a,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):a[e]=n,a}function o(a,e){var n=Object.keys(a);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(a);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(a,e).enumerable}))),n.push.apply(n,t)}return n}function r(a){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?o(Object(n),!0).forEach((function(e){i(a,e,n[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(a,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(e){Object.defineProperty(a,e,Object.getOwnPropertyDescriptor(n,e))}))}return a}function s(a,e){if(null==a)return{};var n,t,i=function(a,e){if(null==a)return{};var n,t,i={},o=Object.keys(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||(i[n]=a[n]);return i}(a,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(a);for(t=0;t<o.length;t++)n=o[t],e.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(a,n)&&(i[n]=a[n])}return i}var 
p=t.createContext({}),l=function(a){var e=t.useContext(p),n=e;return a&&(n="function"==typeof a?a(e):r(r({},e),a)),n},k=function(a){var e=l(a.components);return t.createElement(p.Provider,{value:e},a.children)},c="mdxType",d={inlineCode:"code",wrapper:function(a){var e=a.children;return t.createElement(t.Fragment,{},e)}},f=t.forwardRef((function(a,e){var n=a.components,i=a.mdxType,o=a.originalType,p=a.parentName,k=s(a,["components","mdxType","originalType","parentName"]),c=l(n),f=i,m=c["".concat(p,".").concat(f)]||c[f]||d[f]||o;return n?t.createElement(m,r(r({ref:e},k),{},{components:n})):t.createElement(m,r({ref:e},k))}));function m(a,e){var n=arguments,i=e&&e.mdxType;if("string"==typeof a||i){var o=n.length,r=new Array(o);r[0]=f;var s={};for(var p in e)hasOwnProperty.call(e,p)&&(s[p]=e[p]);s.originalType=a,s[c]="string"==typeof a?a:i,r[1]=s;for(var l=2;l<o;l++)r[l]=n[l];return t.createElement.apply(null,r)}return t.createElement.apply(null,n)}f.displayName="MDXCreateElement"},333:(a,e,n)=>{n.r(e),n.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var t=n(7462),i=(n(7294),n(3905));const o={},r="Benchmarking FastKafka app",s={unversionedId:"guides/Guide_06_Benchmarking_FastKafka",id:"version-0.5.0/guides/Guide_06_Benchmarking_FastKafka",title:"Benchmarking FastKafka app",description:"Prerequisites",source:"@site/versioned_docs/version-0.5.0/guides/Guide_06_Benchmarking_FastKafka.md",sourceDirName:"guides",slug:"/guides/Guide_06_Benchmarking_FastKafka",permalink:"/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka",draft:!1,tags:[],version:"0.5.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Deploying FastKafka using Docker",permalink:"/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka"},next:{title:"FastKafka",permalink:"/docs/0.5.0/api/fastkafka/"}},p={},l=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Creating FastKafka Code",id:"creating-fastkafka-code",level:3},{value:"Starting 
Kafka",id:"starting-kafka",level:3},{value:"Installing Java and Kafka",id:"installing-java-and-kafka",level:4},{value:"Creating configuration for Zookeeper and Kafka",id:"creating-configuration-for-zookeeper-and-kafka",level:4},{value:"Starting Zookeeper and Kafka",id:"starting-zookeeper-and-kafka",level:4},{value:"Creating topics in Kafka",id:"creating-topics-in-kafka",level:4},{value:"Populating topics with dummy data",id:"populating-topics-with-dummy-data",level:4},{value:"Benchmarking FastKafka",id:"benchmarking-fastkafka",level:3}],k={toc:l},c="wrapper";function d(a){let{components:e,...n}=a;return(0,i.kt)(c,(0,t.Z)({},k,n,{components:e,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"benchmarking-fastkafka-app"},"Benchmarking FastKafka app"),(0,i.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"To benchmark a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nproject, you will need the following:"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"A library built with\n",(0,i.kt)("a",{parentName:"li",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"."),(0,i.kt)("li",{parentName:"ol"},"A running ",(0,i.kt)("inlineCode",{parentName:"li"},"Kafka")," instance to benchmark the FastKafka application\nagainst.")),(0,i.kt)("h3",{id:"creating-fastkafka-code"},"Creating FastKafka Code"),(0,i.kt)("p",null,"Let\u2019s create a\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"-based\napplication and write it to the ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file based on the\n",(0,i.kt)("a",{parentName:"p",href:"/docs#tutorial"},"tutorial"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file\n\nfrom contextlib import 
asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n "description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = 
IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\nhas a decorator for benchmarking which is appropriately called as\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark"),". Let\u2019s edit our ",(0,i.kt)("inlineCode",{parentName:"p"},"application.py")," file and add the\n",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," decorator to the consumes method."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'# content of the "application.py" file with benchmark\n\nfrom contextlib import asynccontextmanager\n\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\nfrom fastkafka import FastKafka\n\nml_models = {}\n\n\n@asynccontextmanager\nasync def lifespan(app: FastKafka):\n # Load the ML model\n X, y = load_iris(return_X_y=True)\n ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(\n X, y\n )\n yield\n # Clean up the ML models and release the resources\n ml_models.clear()\n\n\nfrom pydantic import BaseModel, NonNegativeFloat, Field\n\nclass IrisInputData(BaseModel):\n sepal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal length in cm"\n )\n sepal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Sepal width in cm"\n )\n petal_length: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal length in cm"\n )\n petal_width: NonNegativeFloat = Field(\n ..., example=0.5, description="Petal width in cm"\n )\n\n\nclass IrisPrediction(BaseModel):\n species: str = Field(..., example="setosa", description="Predicted species")\n\nfrom fastkafka import FastKafka\n\nkafka_brokers = {\n "localhost": {\n "url": "localhost",\n "description": "local development kafka broker",\n "port": 9092,\n },\n "production": {\n "url": "kafka.airt.ai",\n 
"description": "production kafka broker",\n "port": 9092,\n "protocol": "kafka-secure",\n "security": {"type": "plain"},\n },\n}\n\nkafka_app = FastKafka(\n title="Iris predictions",\n kafka_brokers=kafka_brokers,\n lifespan=lifespan,\n)\n\n@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")\n@kafka_app.benchmark(interval=1, sliding_window_size=5)\nasync def on_input_data(msg: IrisInputData):\n species_class = ml_models["iris_predictor"].predict(\n [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]\n )[0]\n\n await to_predictions(species_class)\n\n\n@kafka_app.produces(topic="predictions")\nasync def to_predictions(species_class: int) -> IrisPrediction:\n iris_species = ["setosa", "versicolor", "virginica"]\n\n prediction = IrisPrediction(species=iris_species[species_class])\n return prediction\n')),(0,i.kt)("p",null,"Here we are conducting a benchmark of a function that consumes data from\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic with an interval of 1 second and a sliding window\nsize of 5."),(0,i.kt)("p",null,"This ",(0,i.kt)("inlineCode",{parentName:"p"},"benchmark")," method uses the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," parameter to calculate the\nresults over a specific time period, and the ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size"),"\nparameter to determine the maximum number of results to use in\ncalculating the average throughput and standard deviation."),(0,i.kt)("p",null,"This benchmark is important to ensure that the function is performing\noptimally and to identify any areas for improvement."),(0,i.kt)("h3",{id:"starting-kafka"},"Starting Kafka"),(0,i.kt)("p",null,"If you already have a ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," running somewhere, then you can skip this\nstep."),(0,i.kt)("p",null,"Please keep in mind that your benchmarking results may be affected by\nbottlenecks such as network, CPU cores in the Kafka machine, or even 
the\nKafka configuration itself."),(0,i.kt)("h4",{id:"installing-java-and-kafka"},"Installing Java and Kafka"),(0,i.kt)("p",null,"We need a working ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),"instance to benchmark our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, and to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," we need ",(0,i.kt)("inlineCode",{parentName:"p"},"Java"),". Thankfully,\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\ncomes with a CLI to install both ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," on our machine."),(0,i.kt)("p",null,"So, let\u2019s install ",(0,i.kt)("inlineCode",{parentName:"p"},"Java")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," by executing the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka testing install_deps\n")),(0,i.kt)("p",null,"The above command will extract ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),' scripts at the location\n\u201c\\$HOME/.local/kafka_2.13-3.3.2" on your machine.'),(0,i.kt)("h4",{id:"creating-configuration-for-zookeeper-and-kafka"},"Creating configuration for Zookeeper and Kafka"),(0,i.kt)("p",null,"Now we need to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," separately, and to start\nthem we need ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," files."),(0,i.kt)("p",null,"Let\u2019s create a folder inside the folder where ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," scripts were\nextracted and change directory into 
it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir\n")),(0,i.kt)("p",null,"Let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"zookeeper.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper\nclientPort=2181\nmaxClientCnxns=0\n")),(0,i.kt)("p",null,"Similarly, let\u2019s create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"kafka.properties")," and write the\nfollowing content to the file:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"broker.id=0\nlisteners=PLAINTEXT://:9092\n\nnum.network.threads=3\nnum.io.threads=8\nsocket.send.buffer.bytes=102400\nsocket.receive.buffer.bytes=102400\nsocket.request.max.bytes=104857600\n\nnum.partitions=1\nnum.recovery.threads.per.data.dir=1\noffsets.topic.replication.factor=1\ntransaction.state.log.replication.factor=1\ntransaction.state.log.min.isr=1\n\nlog.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs\nlog.flush.interval.messages=10000\nlog.flush.interval.ms=1000\nlog.retention.hours=168\nlog.retention.bytes=1073741824\nlog.segment.bytes=1073741824\nlog.retention.check.interval.ms=300000\n\nzookeeper.connect=localhost:2181\nzookeeper.connection.timeout.ms=18000\n")),(0,i.kt)("h4",{id:"starting-zookeeper-and-kafka"},"Starting Zookeeper and Kafka"),(0,i.kt)("p",null,"We need two different terminals to run ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," in one and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," in\nanother. 
Let\u2019s open a new terminal and run the following commands to\nstart ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./zookeeper-server-start.sh ../data_dir/zookeeper.properties\n")),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," is up and running, open a new terminal and execute the\nfollwing commands to start ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-server-start.sh ../data_dir/kafka.properties\n")),(0,i.kt)("p",null,"Now we have both ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," up and running."),(0,i.kt)("h4",{id:"creating-topics-in-kafka"},"Creating topics in Kafka"),(0,i.kt)("p",null,"In a new terminal, please execute the following command to create\nnecessary topics in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),":"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin\ncd $HOME/.local/kafka_2.13-3.3.2/bin\n./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092\n./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092\n")),(0,i.kt)("h4",{id:"populating-topics-with-dummy-data"},"Populating topics with dummy data"),(0,i.kt)("p",null,"To benchmark our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp, we need some data in ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," topics."),(0,i.kt)("p",null,"In the same terminal, let\u2019s create some 
dummy data:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},'yes \'{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}\' | head -n 1000000 > /tmp/test_data\n')),(0,i.kt)("p",null,"This command will create a file called ",(0,i.kt)("inlineCode",{parentName:"p"},"test_data")," in the ",(0,i.kt)("inlineCode",{parentName:"p"},"tmp")," folder\nwith one million rows of text. This will act as dummy data to populate\nthe ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," topic."),(0,i.kt)("p",null,"Let\u2019s populate the created topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," with the dummy data which\nwe created above:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data\n")),(0,i.kt)("p",null,"Now our topic ",(0,i.kt)("inlineCode",{parentName:"p"},"input_data")," has one million records/messages in it. 
If\nyou want more messages in topic, you can simply execute the above\ncommand again and again."),(0,i.kt)("h3",{id:"benchmarking-fastkafka"},"Benchmarking FastKafka"),(0,i.kt)("p",null,"Once ",(0,i.kt)("inlineCode",{parentName:"p"},"Zookeeper")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka")," are ready, benchmarking\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp is as simple as running the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-cmd"},"fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app\n")),(0,i.kt)("p",null,"This command will start the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp and begin consuming messages from ",(0,i.kt)("inlineCode",{parentName:"p"},"Kafka"),", which we spun up earlier.\nAdditionally, the same command will output all of the benchmark\nthroughputs based on the ",(0,i.kt)("inlineCode",{parentName:"p"},"interval")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"sliding_window_size")," values."),(0,i.kt)("p",null,"The output for the ",(0,i.kt)("inlineCode",{parentName:"p"},"fastkafka run")," command is:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-txt"},"[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'\n[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'\n[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...\n[385814]: 
23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh\nost:9092', 'max_poll_records': 100}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})\n[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}\n[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc\n[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1\n[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition\n=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW\n[385814]: 
23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)\n[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)\n")),(0,i.kt)("p",null,"Based on the output, when using 1 worker, our\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.5.0/api/fastkafka//#fastkafka.FastKafka"},(0,i.kt)("inlineCode",{parentName:"a"},"FastKafka")),"\napp achieved a ",(0,i.kt)("inlineCode",{parentName:"p"},"throughput")," of 
93k messages per second and an\n",(0,i.kt)("inlineCode",{parentName:"p"},"average throughput")," of 93k messages per second."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/fd2e624b.b7df8a31.js b/assets/js/fd2e624b.b7df8a31.js new file mode 100644 index 0000000..7f679d3 --- /dev/null +++ b/assets/js/fd2e624b.b7df8a31.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9851],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>k});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function r(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?o(Object(n),!0).forEach((function(t){i(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):o(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function l(e,t){if(null==e)return{};var n,a,i=function(e,t){if(null==e)return{};var n,a,i={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var s=a.createContext({}),p=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},d=function(e){var t=p(e.components);return a.createElement(s.Provider,{value:t},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var 
n=e.components,i=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),m=p(n),u=i,k=m["".concat(s,".").concat(u)]||m[u]||c[u]||o;return n?a.createElement(k,r(r({ref:t},d),{},{components:n})):a.createElement(k,r({ref:t},d))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,r=new Array(o);r[0]=u;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[m]="string"==typeof e?e:i,r[1]=l;for(var p=2;p<o;p++)r[p]=n[p];return a.createElement.apply(null,r)}return a.createElement.apply(null,n)}u.displayName="MDXCreateElement"},9421:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var a=n(7462),i=(n(7294),n(3905));const o={},r=void 0,l={unversionedId:"api/fastkafka/testing/Tester",id:"version-0.7.0/api/fastkafka/testing/Tester",title:"Tester",description:"fastkafka.testing.Tester {fastkafka.testing.Tester}",source:"@site/versioned_docs/version-0.7.0/api/fastkafka/testing/Tester.md",sourceDirName:"api/fastkafka/testing",slug:"/api/fastkafka/testing/Tester",permalink:"/docs/0.7.0/api/fastkafka/testing/Tester",draft:!1,tags:[],version:"0.7.0",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"LocalRedpandaBroker",permalink:"/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker"},next:{title:"fastkafka",permalink:"/docs/0.7.0/cli/fastkafka"}},s={},p=[{value:"<code>fastkafka.testing.Tester</code>",id:"fastkafka.testing.Tester",level:2},{value:"<code>__init__</code>",id:"init",level:3},{value:"<code>benchmark</code>",id:"benchmark",level:3},{value:"<code>consumes</code>",id:"consumes",level:3},{value:"<code>create_docs</code>",id:"create_docs",level:3},{value:"<code>create_mocks</code>",id:"create_mocks",level:3},{value:"<code>fastapi_lifespan</code>",id:"fastapi_lifespan",level:3},{value:"<code>get_topics</code>",id:"get_topics",level:3},{value:"<code>produces</code>",id:"produces",level:3},{value:"
<code>run_in_background</code>",id:"run_in_background",level:3},{value:"<code>set_kafka_broker</code>",id:"set_kafka_broker",level:3},{value:"<code>using_local_kafka</code>",id:"using_local_kafka",level:3},{value:"<code>using_local_redpanda</code>",id:"using_local_redpanda",level:3}],d={toc:p},m="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(m,(0,a.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"fastkafka.testing.Tester"},(0,i.kt)("inlineCode",{parentName:"h2"},"fastkafka.testing.Tester")),(0,i.kt)("h3",{id:"init"},(0,i.kt)("inlineCode",{parentName:"h3"},"__init__")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None")),(0,i.kt)("p",null,"Mirror-like object for testing a FastKafka application"),(0,i.kt)("p",null,"Can be used as context manager"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"app"),": The FastKafka application to be tested."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"broker"),": An optional broker to start and to use for testing."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can 
connect")),(0,i.kt)("h3",{id:"benchmark"},(0,i.kt)("inlineCode",{parentName:"h3"},"benchmark")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]")),(0,i.kt)("p",null,"Decorator to benchmark produces/consumes functions"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"interval"),": Period to use to calculate throughput. If value is of type int,\nthen it will be used as seconds. If value is of type timedelta,\nthen it will be used as it is. default: 1 - one second"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sliding_window_size"),": The size of the sliding window to use to calculate\naverage throughput. 
default: None - By default average throughput is\nnot calculated")),(0,i.kt)("h3",{id:"consumes"},(0,i.kt)("inlineCode",{parentName:"h3"},"consumes")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], 
typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when a message is received in a topic."),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the consumer will subscribe to and execute the\ndecorated function when it receives a message from the topic,\ndefault: None. If the topic is not specified, topic name will be\ninferred from the decorated function name by stripping the defined prefix"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"decoder"),": Decoder to use to decode messages consumed from the topic,\ndefault: json - By default, it uses json decoder to decode\nbytes to json string and then it creates instance of pydantic\nBaseModel. 
It also accepts custom decoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"executor"),': Type of executor to choose for consuming tasks. Avaliable options\nare "SequentialExecutor" and "DynamicTaskExecutor". The default option is\n"SequentialExecutor" which will execute the consuming tasks sequentially.\nIf the consuming tasks have high latency it is recommended to use\n"DynamicTaskExecutor" which will wrap the consuming functions into tasks\nand run them in on asyncio loop in background. This comes with a cost of\nincreased overhead so use it only in cases when your consume functions have\nhigh latency such as database queries or some other type of networking.'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic name\nif the topic argument is not passed, default: "on_". If the decorated\nfunction name is not prefixed with the defined prefix and topic argument\nis not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"brokers"),": Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": Optional description of the consuming function async docs.\nIf not provided, consuming function ",(0,i.kt)("strong",{parentName:"li"},"doc")," attr will be used."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"*topics"),": optional list of topics to subscribe to. 
If not set,\ncall :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," or :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".assign")," before consuming records.\nPassing topics directly is same as calling :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".subscribe")," API."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string (or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings) that the consumer should contact to bootstrap\ninitial cluster metadata.")),(0,i.kt)("p",null,"This does not have to be the full node list.\nIt just needs to have at least one broker that will respond to a\nMetadata API Request. Default port is 9092. If no servers are\nspecified, will default to ",(0,i.kt)("inlineCode",{parentName:"p"},"localhost:9092"),"."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client. Also\nsubmitted to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~.consumer.group_coordinator.GroupCoordinator"),"\nfor logging with respect to consumer group administration. Default:\n",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-{version}")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"group_id"),": name of the consumer group to join for dynamic\npartition assignment (if enabled), and to use for fetching and\ncommitting offsets. 
If None, auto-partition assignment (via\ngroup coordinator) and offset commits are disabled.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_deserializer"),": Any callable that takes a\nraw message key and returns a deserialized key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_deserializer"),": Any callable that takes a\nraw message value and returns a deserialized value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_min_bytes"),": Minimum amount of data the server should\nreturn for a fetch request, otherwise wait up to\n",(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms")," for more data to accumulate. Default: 1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_bytes"),": The maximum amount of data the server should\nreturn for a fetch request. This is not an absolute maximum, if\nthe first message in the first non-empty partition of the fetch\nis larger than this value, the message will still be returned\nto ensure that the consumer can make progress. NOTE: consumer\nperforms fetches to multiple brokers in parallel so memory\nusage will depend on the number of brokers containing\npartitions for the topic.\nSupported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fetch_max_wait_ms"),": The maximum amount of time in milliseconds\nthe server will block before answering the fetch request if\nthere isn't sufficient data to immediately satisfy the\nrequirement given by fetch_min_bytes. Default: 500."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_partition_fetch_bytes"),": The maximum amount of data\nper-partition the server will return. 
The maximum total memory\nused for a request ",(0,i.kt)("inlineCode",{parentName:"li"},"= #partitions * max_partition_fetch_bytes"),".\nThis size must be at least as large as the maximum message size\nthe server allows or else it is possible for the producer to\nsend messages larger than the consumer can fetch. If that\nhappens, the consumer can get stuck trying to fetch a large\nmessage on a certain partition. Default: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_records"),": The maximum number of records returned in a\nsingle call to :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),". Defaults ",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", no limit."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Client request timeout in milliseconds.\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_offset_reset"),": A policy for resetting offsets on\n:exc:",(0,i.kt)("inlineCode",{parentName:"li"},".OffsetOutOfRangeError")," errors: ",(0,i.kt)("inlineCode",{parentName:"li"},"earliest")," will move to the oldest\navailable message, ",(0,i.kt)("inlineCode",{parentName:"li"},"latest")," will move to the most recent, and\n",(0,i.kt)("inlineCode",{parentName:"li"},"none")," will raise an exception so you can handle this case.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"latest"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_auto_commit"),": If true the consumer's offset will be\nperiodically committed in the background. Default: True."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"auto_commit_interval_ms"),": milliseconds between automatic\noffset commits, if enable_auto_commit is True. 
Default: 5000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"check_crcs"),": Automatically check the CRC32 of the records\nconsumed. This ensures no on-the-wire or on-disk corruption to\nthe messages occurred. This check adds some overhead, so it may\nbe disabled in cases seeking extreme performance. Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partition_assignment_strategy"),": List of objects to use to\ndistribute partition ownership amongst consumer instances when\ngroup management is used. This preference is implicit in the order\nof the strategies in the list. When assignment strategy changes:\nto support a change to the assignment strategy, new versions must\nenable support both for the old assignment strategy and the new\none. The coordinator will choose the old assignment strategy until\nall members have been updated. Then it will choose the new\nstrategy. Default: ","[:class:",(0,i.kt)("inlineCode",{parentName:"li"},".RoundRobinPartitionAssignor"),"]"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_poll_interval_ms"),": Maximum allowed time between calls to\nconsume messages (e.g., :meth:",(0,i.kt)("inlineCode",{parentName:"li"},".getmany"),"). If this interval\nis exceeded the consumer is considered failed and the group will\nrebalance in order to reassign the partitions to another consumer\ngroup member. If API methods block waiting for messages, that time\ndoes not count against this timeout. See ",(0,i.kt)("inlineCode",{parentName:"li"},"KIP-62"),"_ for more\ninformation. 
Default 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rebalance_timeout_ms"),": The maximum time server will wait for this\nconsumer to rejoin the group in a case of rebalance. In Java client\nthis behaviour is bound to ",(0,i.kt)("inlineCode",{parentName:"li"},"max.poll.interval.ms")," configuration,\nbut as ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka")," will rejoin the group in the background, we\ndecouple this setting to allow finer tuning by users that use\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},".ConsumerRebalanceListener")," to delay rebalacing. Defaults\nto ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),": Client group session and failure detection\ntimeout. The consumer sends periodic heartbeats\n(",(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat.interval.ms"),") to indicate its liveness to the broker.\nIf no hearts are received by the broker for a group member within\nthe session timeout, the broker will remove the consumer from the\ngroup and trigger a rebalance. The allowed range is configured with\nthe ",(0,i.kt)("strong",{parentName:"li"},"broker")," configuration properties\n",(0,i.kt)("inlineCode",{parentName:"li"},"group.min.session.timeout.ms")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"group.max.session.timeout.ms"),".\nDefault: 10000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"heartbeat_interval_ms"),": The expected time in milliseconds\nbetween heartbeats to the consumer coordinator when using\nKafka's group management feature. Heartbeats are used to ensure\nthat the consumer's session stays active and to facilitate\nrebalancing when new consumers join or leave the group. The\nvalue must be set lower than ",(0,i.kt)("inlineCode",{parentName:"li"},"session_timeout_ms"),", but typically\nshould be set no higher than 1/3 of that value. 
It can be\nadjusted even lower to control the expected time for normal\nrebalances. Default: 3000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"consumer_timeout_ms"),": maximum wait timeout for background fetching\nroutine. Mostly defines how fast the system will see rebalance and\nrequest new data for new partitions. Default: 200"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"AIOKafkaConsumer")," supports Kafka API versions >=0.9 only.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more information see\n:ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),". Default: None."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"exclude_internal_topics"),": Whether records from internal topics\n(such as offsets) should be exposed to the consumer. If set to True\nthe only way to receive records from an internal topic is\nsubscribing to it. 
Requires 0.10+ Default: True"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying ",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isolation_level"),": Controls how to read messages written\ntransactionally.")),(0,i.kt)("p",null,"If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed"),", :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\ntransactional messages which have been committed.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")," (the default), :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will\nreturn all messages, even transactional messages which have been\naborted."),(0,i.kt)("p",null,"Non-transactional messages will be returned unconditionally in\neither mode."),(0,i.kt)("p",null,"Messages will always be returned in offset order. Hence, in\n",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," mode, :meth:",(0,i.kt)("inlineCode",{parentName:"p"},".getmany")," will only return\nmessages up to the last stable offset (LSO), which is the one less\nthan the offset of the first open transaction. In particular any\nmessages appearing after messages belonging to ongoing transactions\nwill be withheld until the relevant transaction has been completed.\nAs a result, ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," consumers will not be able to read up\nto the high watermark when there are in flight transactions.\nFurther, when in ",(0,i.kt)("inlineCode",{parentName:"p"},"read_committed")," the seek_to_end method will\nreturn the LSO. See method docs below. 
Default: ",(0,i.kt)("inlineCode",{parentName:"p"},"read_uncommitted")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". Valid values are:\n",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: None"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider"),": OAuthBearer token provider instance. 
(See :mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("h3",{id:"create_docs"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_docs")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_docs(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Create the asyncapi documentation based on the configured consumers and producers."),(0,i.kt)("p",null,"This function exports the asyncapi specification based on the configured consumers\nand producers in the FastKafka instance. It generates the asyncapi documentation by\nextracting the topics and callbacks from the consumers and producers."),(0,i.kt)("p",null,"Note:\nThe asyncapi documentation is saved to the location specified by the ",(0,i.kt)("inlineCode",{parentName:"p"},"_asyncapi_path"),"\nattribute of the FastKafka instance."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"None")),(0,i.kt)("h3",{id:"create_mocks"},(0,i.kt)("inlineCode",{parentName:"h3"},"create_mocks")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def create_mocks(self: fastkafka.FastKafka) -> None")),(0,i.kt)("p",null,"Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock"),(0,i.kt)("h3",{id:"fastapi_lifespan"},(0,i.kt)("inlineCode",{parentName:"h3"},"fastapi_lifespan")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]")),(0,i.kt)("p",null,"Method for managing the lifespan of a FastAPI application with a specific Kafka 
broker."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_broker_name"),": The name of the Kafka broker to start FastKafka")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"Lifespan function to use for initializing FastAPI")),(0,i.kt)("h3",{id:"get_topics"},(0,i.kt)("inlineCode",{parentName:"h3"},"get_topics")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]")),(0,i.kt)("p",null,"Get all topics for both producing and consuming."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A set of topics for both producing and consuming.")),(0,i.kt)("h3",{id:"produces"},(0,i.kt)("inlineCode",{parentName:"h3"},"produces")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7f1c4d890f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f1c4c8a1210>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, 
sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]")),(0,i.kt)("p",null,"Decorator registering the callback called when delivery report for a produced message is received"),(0,i.kt)("p",null,"This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"topic"),": Kafka topic that the producer will send returned values from\nthe decorated function to, default: None- If the topic is not\nspecified, topic name will be inferred from the decorated function\nname by stripping the defined prefix."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encoder"),": Encoder to use to encode messages before sending it to topic,\ndefault: json - By default, it uses json encoder to convert\npydantic basemodel to json string and then encodes the string to bytes\nusing 'utf-8' encoding. 
It also accepts custom encoder function."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix"),': Prefix stripped from the decorated function to define a topic\nname if the topic argument is not passed, default: "to_". If the\ndecorated function name is not prefixed with the defined prefix\nand topic argument is not passed, then this method will throw ValueError'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"brokers"),": Optional argument specifying multiple broker clusters for consuming\nmessages from different Kafka clusters in FastKafka."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"description"),": Optional description of the producing function async docs.\nIf not provided, producing function ",(0,i.kt)("strong",{parentName:"li"},"doc")," attr will be used."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"bootstrap_servers"),": a ",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," string or list of\n",(0,i.kt)("inlineCode",{parentName:"li"},"host[:port]")," strings that the producer should contact to\nbootstrap initial cluster metadata. This does not have to be the\nfull node list. It just needs to have at least one broker that will\nrespond to a Metadata API Request. Default port is 9092. If no\nservers are specified, will default to ",(0,i.kt)("inlineCode",{parentName:"li"},"localhost:9092"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"client_id"),": a name for this client. 
This string is passed in\neach request to servers and can be used to identify specific\nserver-side log entries that correspond to this client.\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"aiokafka-producer-#")," (appended with a unique number\nper instance)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key_serializer"),": used to convert user-supplied keys to bytes\nIf not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as ",(0,i.kt)("inlineCode",{parentName:"li"},"f(key),")," should return\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"value_serializer"),": used to convert user-supplied message\nvalues to :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),". If not :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", called as\n",(0,i.kt)("inlineCode",{parentName:"li"},"f(value)"),", should return :class:",(0,i.kt)("inlineCode",{parentName:"li"},"bytes"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"acks"),": one of ",(0,i.kt)("inlineCode",{parentName:"li"},"0"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"1"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". The number of acknowledgments\nthe producer requires the leader to have received before considering a\nrequest complete. This controls the durability of records that are\nsent. The following settings are common:")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"0"),": Producer will not wait for any acknowledgment from the server\nat all. The message will immediately be added to the socket\nbuffer and considered sent. 
No guarantee can be made that the\nserver has received the record in this case, and the retries\nconfiguration will not take effect (as the client won't\ngenerally know of any failures). The offset given back for each\nrecord will always be set to -1."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"1"),": The broker leader will write the record to its local log but\nwill respond without awaiting full acknowledgement from all\nfollowers. In this case should the leader fail immediately\nafter acknowledging the record but before the followers have\nreplicated it then the record will be lost."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"all"),": The broker leader will wait for the full set of in-sync\nreplicas to acknowledge the record. This guarantees that the\nrecord will not be lost as long as at least one in-sync replica\nremains alive. This is the strongest available guarantee.")),(0,i.kt)("p",null,"If unset, defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=1"),". If ",(0,i.kt)("inlineCode",{parentName:"p"},"enable_idempotence")," is\n:data:",(0,i.kt)("inlineCode",{parentName:"p"},"True")," defaults to ",(0,i.kt)("inlineCode",{parentName:"p"},"acks=all")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"compression_type"),": The compression type for all data generated by\nthe producer. Valid values are ",(0,i.kt)("inlineCode",{parentName:"li"},"gzip"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"snappy"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"lz4"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zstd"),"\nor :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),".\nCompression is of full batches of data, so the efficacy of batching\nwill also impact the compression ratio (more batching means better\ncompression). 
Default: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_batch_size"),": Maximum size of buffered data per partition.\nAfter this amount :meth:",(0,i.kt)("inlineCode",{parentName:"li"},"send")," coroutine will block until batch is\ndrained.\nDefault: 16384"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),": The producer groups together any records that arrive\nin between request transmissions into a single batched request.\nNormally this occurs only under load when records arrive faster\nthan they can be sent out. However in some circumstances the client\nmay want to reduce the number of requests even under moderate load.\nThis setting accomplishes this by adding a small amount of\nartificial delay; that is, if first request is processed faster,\nthan ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms"),", producer will wait ",(0,i.kt)("inlineCode",{parentName:"li"},"linger_ms - process_time"),".\nDefault: 0 (i.e. no delay)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"partitioner"),": Callable used to determine which partition\neach message is assigned to. Called (after key serialization):\n",(0,i.kt)("inlineCode",{parentName:"li"},"partitioner(key_bytes, all_partitions, available_partitions)"),".\nThe default partitioner implementation hashes each non-None key\nusing the same murmur2 algorithm as the Java client so that\nmessages with the same key are assigned to the same partition.\nWhen a key is :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"),", the message is delivered to a random partition\n(filtered to partitions with available leaders only, if possible)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"max_request_size"),": The maximum size of a request. This is also\neffectively a cap on the maximum record size. 
Note that the server\nhas its own cap on record size which may be different from this.\nThis setting will limit the number of record batches the producer\nwill send in a single request to avoid sending huge requests.\nDefault: 1048576."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"metadata_max_age_ms"),": The period of time in milliseconds after\nwhich we force a refresh of metadata even if we haven't seen any\npartition leadership changes to proactively discover any new\nbrokers or partitions. Default: 300000"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"request_timeout_ms"),": Produce request timeout in milliseconds.\nAs it's sent as part of\n:class:",(0,i.kt)("inlineCode",{parentName:"li"},"~kafka.protocol.produce.ProduceRequest")," (it's a blocking\ncall), maximum waiting time can be up to ",(0,i.kt)("inlineCode",{parentName:"li"},"2 *\nrequest_timeout_ms"),".\nDefault: 40000."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"retry_backoff_ms"),": Milliseconds to backoff when retrying on\nerrors. Default: 100."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"api_version"),": specify which kafka API version to use.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"li"},"auto"),", will attempt to infer the broker version by\nprobing various APIs. Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"auto")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"security_protocol"),": Protocol used to communicate with brokers.\nValid values are: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SSL"),". 
Default: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAINTEXT"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ssl_context"),": pre-configured :class:",(0,i.kt)("inlineCode",{parentName:"li"},"~ssl.SSLContext"),"\nfor wrapping socket connections. Directly passed into asyncio's\n:meth:",(0,i.kt)("inlineCode",{parentName:"li"},"~asyncio.loop.create_connection"),". For more\ninformation see :ref:",(0,i.kt)("inlineCode",{parentName:"li"},"ssl_auth"),".\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"connections_max_idle_ms"),": Close idle connections after the number\nof milliseconds specified by this config. Specifying :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")," will\ndisable idle checks. Default: 540000 (9 minutes)."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enable_idempotence"),": When set to :data:",(0,i.kt)("inlineCode",{parentName:"li"},"True"),", the producer will\nensure that exactly one copy of each message is written in the\nstream. If :data:",(0,i.kt)("inlineCode",{parentName:"li"},"False"),", producer retries due to broker failures,\netc., may write duplicates of the retried message in the stream.\nNote that enabling idempotence acks to set to ",(0,i.kt)("inlineCode",{parentName:"li"},"all"),". If it is not\nexplicitly set by the user it will be chosen. If incompatible\nvalues are set, a :exc:",(0,i.kt)("inlineCode",{parentName:"li"},"ValueError")," will be thrown.\nNew in version 0.5.0."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_mechanism"),": Authentication mechanism when security_protocol\nis configured for ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_PLAINTEXT")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"SASL_SSL"),". 
Valid values\nare: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"GSSAPI"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-256"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"SCRAM-SHA-512"),",\n",(0,i.kt)("inlineCode",{parentName:"li"},"OAUTHBEARER"),".\nDefault: ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_username"),": username for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_plain_password"),": password for SASL ",(0,i.kt)("inlineCode",{parentName:"li"},"PLAIN")," authentication.\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sasl_oauth_token_provider ("),": class:",(0,i.kt)("inlineCode",{parentName:"li"},"~aiokafka.abc.AbstractTokenProvider"),"):\nOAuthBearer token provider instance. 
(See\n:mod:",(0,i.kt)("inlineCode",{parentName:"li"},"kafka.oauth.abstract"),").\nDefault: :data:",(0,i.kt)("inlineCode",{parentName:"li"},"None"))),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},": A function returning the same function")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": when needed")),(0,i.kt)("h3",{id:"run_in_background"},(0,i.kt)("inlineCode",{parentName:"h3"},"run_in_background")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]")),(0,i.kt)("p",null,"Decorator to schedule a task to be run in the background."),(0,i.kt)("p",null,"This decorator is used to schedule a task to be run in the background when the app's ",(0,i.kt)("inlineCode",{parentName:"p"},"_on_startup")," event is triggered."),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"A decorator function that takes a background task as an input and stores it to be run in the backround.")),(0,i.kt)("h3",{id:"set_kafka_broker"},(0,i.kt)("inlineCode",{parentName:"h3"},"set_kafka_broker")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def set_kafka_broker(self, kafka_broker_name: str) -> None")),(0,i.kt)("p",null,"Sets the Kafka broker to start FastKafka with"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"kafka_broker_name"),": The name of the Kafka broker to start 
FastKafka")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"None")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Exceptions"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"ValueError"),": If the provided kafka_broker_name is not found in dictionary of kafka_brokers")),(0,i.kt)("h3",{id:"using_local_kafka"},(0,i.kt)("inlineCode",{parentName:"h3"},"using_local_kafka")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester")),(0,i.kt)("p",null,"Starts local Kafka broker used by the Tester instance"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"data_dir"),": Path to the directory where the zookeepeer instance will save data"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"zookeeper_port"),": Port for clients (Kafka brokes) to connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"An instance of tester with Kafka as broker")),(0,i.kt)("h3",{id:"using_local_redpanda"},(0,i.kt)("inlineCode",{parentName:"h3"},"using_local_redpanda")),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester")),(0,i.kt)("p",null,"Starts local 
Redpanda broker used by the Tester instance"),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"listener_port"),": Port on which the clients (producers and consumers) can connect"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"tag"),": Tag of Redpanda image to use to start container"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"seastar_core"),": Core(s) to use byt Seastar (the framework Redpanda uses under the hood)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"memory"),": The amount of memory to make available to Redpanda"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"mode"),": Mode to use to load configuration properties in container"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"default_log_level"),": Log levels to use for Redpanda")),(0,i.kt)("p",null,(0,i.kt)("strong",{parentName:"p"},"Returns"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"An instance of tester with Redpanda as broker")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/fdc5233c.73a1ac59.js b/assets/js/fdc5233c.73a1ac59.js new file mode 100644 index 0000000..5d4e369 --- /dev/null +++ b/assets/js/fdc5233c.73a1ac59.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[9794],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>u});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t<arguments.length;t++){var 
a=null!=arguments[t]?arguments[t]:{};t%2?i(Object(a),!0).forEach((function(t){r(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):i(Object(a)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}function l(e,t){if(null==e)return{};var a,n,r=function(e,t){if(null==e)return{};var a,n,r={},i=Object.keys(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)a=i[n],t.indexOf(a)>=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},d="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,i=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=s(a),k=r,u=d["".concat(p,".").concat(k)]||d[k]||f[k]||i;return a?n.createElement(u,o(o({ref:t},c),{},{components:a})):n.createElement(u,o({ref:t},c))}));function u(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var i=a.length,o=new Array(i);o[0]=k;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[d]="string"==typeof e?e:r,o[1]=l;for(var s=2;s<i;s++)o[s]=a[s];return n.createElement.apply(null,o)}return n.createElement.apply(null,a)}k.displayName="MDXCreateElement"},3444:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>f,frontMatter:()=>i,metadata:()=>l,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const i={},o=void 0,l={unversionedId:"api/fastkafka/EventMetadata",id:"version-0.7.1/api/fastkafka/EventMetadata",title:"EventMetadata",description:"fastkafka.EventMetadata 
{fastkafka.EventMetadata}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/EventMetadata.md",sourceDirName:"api/fastkafka",slug:"/api/fastkafka/EventMetadata",permalink:"/docs/0.7.1/api/fastkafka/EventMetadata",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Benchmarking FastKafka app",permalink:"/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka"},next:{title:"FastKafka",permalink:"/docs/0.7.1/api/fastkafka/"}},p={},s=[{value:"<code>fastkafka.EventMetadata</code>",id:"fastkafka.EventMetadata",level:2},{value:"<code>create_event_metadata</code>",id:"create_event_metadata",level:3}],c={toc:s},d="wrapper";function f(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"fastkafka.EventMetadata"},(0,r.kt)("inlineCode",{parentName:"h2"},"fastkafka.EventMetadata")),(0,r.kt)("p",null,"A class for encapsulating Kafka record metadata."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"topic"),": The topic this record is received from"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"partition"),": The partition from which this record is received"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"offset"),": The position of this record in the corresponding Kafka partition"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"timestamp"),": The timestamp of this record"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"timestamp_type"),": The timestamp type of this record"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"key"),": The key (or ",(0,r.kt)("inlineCode",{parentName:"li"},"None")," if no key is specified)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"value"),": The 
value"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"serialized_key_size"),": The size of the serialized, uncompressed key in bytes"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"serialized_value_size"),": The size of the serialized, uncompressed value in bytes"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"headers"),": The headers")),(0,r.kt)("h3",{id:"create_event_metadata"},(0,r.kt)("inlineCode",{parentName:"h3"},"create_event_metadata")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"def create_event_metadata(record: aiokafka.structs.ConsumerRecord) -> EventMetadata")),(0,r.kt)("p",null,"Creates an instance of EventMetadata from a ConsumerRecord."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"record"),": The Kafka ConsumerRecord.")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Returns"),":"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"The created EventMetadata instance.")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/fe73cc84.50771324.js b/assets/js/fe73cc84.50771324.js new file mode 100644 index 0000000..9460422 --- /dev/null +++ b/assets/js/fe73cc84.50771324.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[3251],{3905:(e,n,t)=>{t.d(n,{Zo:()=>l,kt:()=>k});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function c(e){for(var n=1;n<arguments.length;n++){var 
t=null!=arguments[n]?arguments[n]:{};n%2?a(Object(t),!0).forEach((function(n){o(e,n,t[n])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):a(Object(t)).forEach((function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(t,n))}))}return e}function i(e,n){if(null==e)return{};var t,r,o=function(e,n){if(null==e)return{};var t,r,o={},a=Object.keys(e);for(r=0;r<a.length;r++)t=a[r],n.indexOf(t)>=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r<a.length;r++)t=a[r],n.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var s=r.createContext({}),d=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):c(c({},n),e)),t},l=function(e){var n=d(e.components);return r.createElement(s.Provider,{value:n},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},u=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,l=i(e,["components","mdxType","originalType","parentName"]),p=d(t),u=o,k=p["".concat(s,".").concat(u)]||p[u]||f[u]||a;return t?r.createElement(k,c(c({ref:n},l),{},{components:t})):r.createElement(k,c({ref:n},l))}));function k(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var a=t.length,c=new Array(a);c[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:o,c[1]=i;for(var d=2;d<a;d++)c[d]=t[d];return r.createElement.apply(null,c)}return r.createElement.apply(null,t)}u.displayName="MDXCreateElement"},604:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>c,default:()=>f,frontMatter:()=>a,metadata:()=>i,toc:()=>d});var r=t(7462),o=(t(7294),t(3905));const a={},c=void 
0,i={unversionedId:"api/fastkafka/encoder/json_encoder",id:"version-0.7.1/api/fastkafka/encoder/json_encoder",title:"json_encoder",description:"fastkafka.encoder.jsonencoder {fastkafka.encoder.jsonencoder}",source:"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/json_encoder.md",sourceDirName:"api/fastkafka/encoder",slug:"/api/fastkafka/encoder/json_encoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/json_encoder",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"json_decoder",permalink:"/docs/0.7.1/api/fastkafka/encoder/json_decoder"},next:{title:"DynamicTaskExecutor",permalink:"/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor"}},s={},d=[{value:"<code>fastkafka.encoder.json_encoder</code>",id:"fastkafka.encoder.json_encoder",level:2},{value:"<code>json_encoder</code>",id:"json_encoder",level:3}],l={toc:d},p="wrapper";function f(e){let{components:n,...t}=e;return(0,o.kt)(p,(0,r.Z)({},l,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"fastkafka.encoder.json_encoder"},(0,o.kt)("inlineCode",{parentName:"h2"},"fastkafka.encoder.json_encoder")),(0,o.kt)("h3",{id:"json_encoder"},(0,o.kt)("inlineCode",{parentName:"h3"},"json_encoder")),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"def json_encoder(msg: pydantic.main.BaseModel) -> bytes")),(0,o.kt)("p",null,"Encoder to encode pydantic instances to json string"),(0,o.kt)("p",null,(0,o.kt)("strong",{parentName:"p"},"Parameters"),":"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"msg"),": An instance of pydantic basemodel")),(0,o.kt)("p",null,(0,o.kt)("strong",{parentName:"p"},"Returns"),":"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Json string in bytes which is encoded from pydantic basemodel")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/fff0a46d.a0ecfadf.js b/assets/js/fff0a46d.a0ecfadf.js new file mode 100644 index 0000000..bb42710 --- /dev/null +++ 
b/assets/js/fff0a46d.a0ecfadf.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[4641],{3905:(e,t,n)=>{n.d(t,{Zo:()=>h,kt:()=>m});var i=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function a(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?r(Object(n),!0).forEach((function(t){o(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function s(e,t){if(null==e)return{};var n,i,o=function(e,t){if(null==e)return{};var n,i,o={},r=Object.keys(e);for(i=0;i<r.length;i++)n=r[i],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i<r.length;i++)n=r[i],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=i.createContext({}),l=function(e){var t=i.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},h=function(e){var t=l(e.components);return i.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},p=i.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,c=e.parentName,h=s(e,["components","mdxType","originalType","parentName"]),d=l(n),p=o,m=d["".concat(c,".").concat(p)]||d[p]||u[p]||r;return n?i.createElement(m,a(a({ref:t},h),{},{components:n})):i.createElement(m,a({ref:t},h))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,a=new 
Array(r);a[0]=p;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[d]="string"==typeof e?e:o,a[1]=s;for(var l=2;l<r;l++)a[l]=n[l];return i.createElement.apply(null,a)}return i.createElement.apply(null,n)}p.displayName="MDXCreateElement"},7129:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>a,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>l});var i=n(7462),o=(n(7294),n(3905));const r={},a=void 0,s={unversionedId:"LICENSE",id:"version-0.7.1/LICENSE",title:"LICENSE",description:"Apache License",source:"@site/versioned_docs/version-0.7.1/LICENSE.md",sourceDirName:".",slug:"/LICENSE",permalink:"/docs/0.7.1/LICENSE",draft:!1,tags:[],version:"0.7.1",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"run_fastkafka_server_process",permalink:"/docs/0.7.1/cli/run_fastkafka_server_process"},next:{title:"Contributing to fastkafka",permalink:"/docs/0.7.1/CONTRIBUTING"}},c={},l=[],h={toc:l},d="wrapper";function u(e){let{components:t,...n}=e;return(0,o.kt)(d,(0,i.Z)({},h,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Apache License\nVersion 2.0, January 2004\n",(0,o.kt)("a",{parentName:"p",href:"http://www.apache.org/licenses/"},"http://www.apache.org/licenses/")),(0,o.kt)("p",null," TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Definitions."),(0,o.kt)("p",{parentName:"li"},'"License" shall mean the terms and conditions for use, reproduction,\nand distribution as defined by Sections 1 through 9 of this document.'),(0,o.kt)("p",{parentName:"li"},'"Licensor" shall mean the copyright owner or entity authorized by\nthe copyright owner that is granting the License.'),(0,o.kt)("p",{parentName:"li"},'"Legal Entity" shall mean the union of the acting entity and all\nother entities that control, are controlled by, or are under common\ncontrol with that entity. 
For the purposes of this definition,\n"control" means (i) the power, direct or indirect, to cause the\ndirection or management of such entity, whether by contract or\notherwise, or (ii) ownership of fifty percent (50%) or more of the\noutstanding shares, or (iii) beneficial ownership of such entity.'),(0,o.kt)("p",{parentName:"li"},'"You" (or "Your") shall mean an individual or Legal Entity\nexercising permissions granted by this License.'),(0,o.kt)("p",{parentName:"li"},'"Source" form shall mean the preferred form for making modifications,\nincluding but not limited to software source code, documentation\nsource, and configuration files.'),(0,o.kt)("p",{parentName:"li"},'"Object" form shall mean any form resulting from mechanical\ntransformation or translation of a Source form, including but\nnot limited to compiled object code, generated documentation,\nand conversions to other media types.'),(0,o.kt)("p",{parentName:"li"},'"Work" shall mean the work of authorship, whether in Source or\nObject form, made available under the License, as indicated by a\ncopyright notice that is included in or attached to the work\n(an example is provided in the Appendix below).'),(0,o.kt)("p",{parentName:"li"},'"Derivative Works" shall mean any work, whether in Source or Object\nform, that is based on (or derived from) the Work and for which the\neditorial revisions, annotations, elaborations, or other modifications\nrepresent, as a whole, an original work of authorship. 
For the purposes\nof this License, Derivative Works shall not include works that remain\nseparable from, or merely link (or bind by name) to the interfaces of,\nthe Work and Derivative Works thereof.'),(0,o.kt)("p",{parentName:"li"},'"Contribution" shall mean any work of authorship, including\nthe original version of the Work and any modifications or additions\nto that Work or Derivative Works thereof, that is intentionally\nsubmitted to Licensor for inclusion in the Work by the copyright owner\nor by an individual or Legal Entity authorized to submit on behalf of\nthe copyright owner. For the purposes of this definition, "submitted"\nmeans any form of electronic, verbal, or written communication sent\nto the Licensor or its representatives, including but not limited to\ncommunication on electronic mailing lists, source code control systems,\nand issue tracking systems that are managed by, or on behalf of, the\nLicensor for the purpose of discussing and improving the Work, but\nexcluding communication that is conspicuously marked or otherwise\ndesignated in writing by the copyright owner as "Not a Contribution."'),(0,o.kt)("p",{parentName:"li"},'"Contributor" shall mean Licensor and any individual or Legal Entity\non behalf of whom a Contribution has been received by Licensor and\nsubsequently incorporated within the Work.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Copyright License. Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\ncopyright license to reproduce, prepare Derivative Works of,\npublicly display, publicly perform, sublicense, and distribute the\nWork and such Derivative Works in Source or Object form.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Grant of Patent License. 
Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\n(except as stated in this section) patent license to make, have made,\nuse, offer to sell, sell, import, and otherwise transfer the Work,\nwhere such license applies only to those patent claims licensable\nby such Contributor that are necessarily infringed by their\nContribution(s) alone or by combination of their Contribution(s)\nwith the Work to which such Contribution(s) was submitted. If You\ninstitute patent litigation against any entity (including a\ncross-claim or counterclaim in a lawsuit) alleging that the Work\nor a Contribution incorporated within the Work constitutes direct\nor contributory patent infringement, then any patent licenses\ngranted to You under this License for that Work shall terminate\nas of the date such litigation is filed.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Redistribution. 
You may reproduce and distribute copies of the\nWork or Derivative Works thereof in any medium, with or without\nmodifications, and in Source or Object form, provided that You\nmeet the following conditions:"),(0,o.kt)("p",{parentName:"li"},"(a) You must give any other recipients of the Work or\nDerivative Works a copy of this License; and"),(0,o.kt)("p",{parentName:"li"},"(b) You must cause any modified files to carry prominent notices\nstating that You changed the files; and"),(0,o.kt)("p",{parentName:"li"},"(c) You must retain, in the Source form of any Derivative Works\nthat You distribute, all copyright, patent, trademark, and\nattribution notices from the Source form of the Work,\nexcluding those notices that do not pertain to any part of\nthe Derivative Works; and"),(0,o.kt)("p",{parentName:"li"},'(d) If the Work includes a "NOTICE" text file as part of its\ndistribution, then any Derivative Works that You distribute must\ninclude a readable copy of the attribution notices contained\nwithin such NOTICE file, excluding those notices that do not\npertain to any part of the Derivative Works, in at least one\nof the following places: within a NOTICE text file distributed\nas part of the Derivative Works; within the Source form or\ndocumentation, if provided along with the Derivative Works; or,\nwithin a display generated by the Derivative Works, if and\nwherever such third-party notices normally appear. The contents\nof the NOTICE file are for informational purposes only and\ndo not modify the License. 
You may add Your own attribution\nnotices within Derivative Works that You distribute, alongside\nor as an addendum to the NOTICE text from the Work, provided\nthat such additional attribution notices cannot be construed\nas modifying the License.'),(0,o.kt)("p",{parentName:"li"},"You may add Your own copyright statement to Your modifications and\nmay provide additional or different license terms and conditions\nfor use, reproduction, or distribution of Your modifications, or\nfor any such Derivative Works as a whole, provided Your use,\nreproduction, and distribution of the Work otherwise complies with\nthe conditions stated in this License.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Submission of Contributions. Unless You explicitly state otherwise,\nany Contribution intentionally submitted for inclusion in the Work\nby You to the Licensor shall be under the terms and conditions of\nthis License, without any additional terms or conditions.\nNotwithstanding the above, nothing herein shall supersede or modify\nthe terms of any separate license agreement you may have executed\nwith Licensor regarding such Contributions.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Trademarks. This License does not grant permission to use the trade\nnames, trademarks, service marks, or product names of the Licensor,\nexcept as required for reasonable and customary use in describing the\norigin of the Work and reproducing the content of the NOTICE file.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},'Disclaimer of Warranty. Unless required by applicable law or\nagreed to in writing, Licensor provides the Work (and each\nContributor provides its Contributions) on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\nimplied, including, without limitation, any warranties or conditions\nof TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\nPARTICULAR PURPOSE. 
You are solely responsible for determining the\nappropriateness of using or redistributing the Work and assume any\nrisks associated with Your exercise of permissions under this License.')),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Limitation of Liability. In no event and under no legal theory,\nwhether in tort (including negligence), contract, or otherwise,\nunless required by applicable law (such as deliberate and grossly\nnegligent acts) or agreed to in writing, shall any Contributor be\nliable to You for damages, including any direct, indirect, special,\nincidental, or consequential damages of any character arising as a\nresult of this License or out of the use or inability to use the\nWork (including but not limited to damages for loss of goodwill,\nwork stoppage, computer failure or malfunction, or any and all\nother commercial damages or losses), even if such Contributor\nhas been advised of the possibility of such damages.")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Accepting Warranty or Additional Liability. While redistributing\nthe Work or Derivative Works thereof, You may choose to offer,\nand charge a fee for, acceptance of support, warranty, indemnity,\nor other liability obligations and/or rights consistent with this\nLicense. 
However, in accepting such obligations, You may act only\non Your own behalf and on Your sole responsibility, not on behalf\nof any other Contributor, and only if You agree to indemnify,\ndefend, and hold each Contributor harmless for any liability\nincurred by, or claims asserted against, such Contributor by reason\nof your accepting any such warranty or additional liability."),(0,o.kt)("p",{parentName:"li"},"END OF TERMS AND CONDITIONS"),(0,o.kt)("p",{parentName:"li"},"APPENDIX: How to apply the Apache License to your work."),(0,o.kt)("p",{parentName:"li"},' To apply the Apache License to your work, attach the following\nboilerplate notice, with the fields enclosed by brackets "[]"\nreplaced with your own identifying information. (Don\'t include\nthe brackets!) The text should be enclosed in the appropriate\ncomment syntax for the file format. We also recommend that a\nfile or class name and description of purpose be included on the\nsame "printed page" as the copyright notice for easier\nidentification within third-party archives.'),(0,o.kt)("p",{parentName:"li"},"Copyright ","[yyyy][name of copyright owner]"),(0,o.kt)("p",{parentName:"li"},'Licensed under the Apache License, Version 2.0 (the "License");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at'),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre"},"http://www.apache.org/licenses/LICENSE-2.0\n")),(0,o.kt)("p",{parentName:"li"},'Unless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.'))))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/main.e19c364c.js b/assets/js/main.e19c364c.js new file mode 100644 index 0000000..d6d6ebf --- /dev/null +++ 
b/assets/js/main.e19c364c.js @@ -0,0 +1,2 @@ +/*! For license information please see main.e19c364c.js.LICENSE.txt */ +(self.webpackChunkfastkafka=self.webpackChunkfastkafka||[]).push([[179],{830:(e,t,n)=>{"use strict";n.d(t,{W:()=>r});var a=n(7294);function r(){return a.createElement("svg",{width:"20",height:"20",className:"DocSearch-Search-Icon",viewBox:"0 0 20 20"},a.createElement("path",{d:"M14.386 14.386l4.0877 4.0877-4.0877-4.0877c-2.9418 2.9419-7.7115 2.9419-10.6533 0-2.9419-2.9418-2.9419-7.7115 0-10.6533 2.9418-2.9419 7.7115-2.9419 10.6533 0 2.9419 2.9418 2.9419 7.7115 0 10.6533z",stroke:"currentColor",fill:"none",fillRule:"evenodd",strokeLinecap:"round",strokeLinejoin:"round"}))}},723:(e,t,n)=>{"use strict";n.d(t,{Z:()=>p});var a=n(7294),r=n(7462),o=n(8356),i=n.n(o),s=n(6887);const l={"002d14fa":[()=>n.e(8064).then(n.bind(n,3469)),"@site/versioned_docs/version-0.6.0/guides/Guide_21_Produces_Basics.md",3469],"0030fd86":[()=>n.e(1887).then(n.t.bind(n,7619,19)),"~docs/default/version-0-5-0-metadata-prop-3bc.json",7619],"036db789":[()=>n.e(6590).then(n.bind(n,4271)),"@site/docs/guides/Guide_02_First_Steps.md",4271],"04d4af82":[()=>n.e(5775).then(n.bind(n,9607)),"@site/docs/api/fastkafka/testing/Tester.md",9607],"0582779b":[()=>n.e(953).then(n.bind(n,3667)),"@site/versioned_docs/version-0.8.0/guides/Guide_05_Lifespan_Handler.md",3667],"060147ec":[()=>n.e(7602).then(n.bind(n,1947)),"@site/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",1947],"065bbf18":[()=>n.e(1244).then(n.bind(n,7426)),"@site/docs/guides/Guide_33_Using_Tester_class_to_test_fastkafka.md",7426],"06acf88d":[()=>n.e(99).then(n.bind(n,4364)),"@site/versioned_docs/version-0.7.0/guides/Guide_03_Authentication.md",4364],"09cca5f2":[()=>n.e(29).then(n.bind(n,2452)),"@site/versioned_docs/version-0.6.0/api/fastkafka/KafkaEvent.md",2452],"0a79db1f":[()=>n.e(2732).then(n.bind(n,8131)),"@site/docs/api/fastkafka/encoder/avro_encoder.md",8131],"0d766b78":[()=>n.e(6492).then(n.bind(n,9585)),"@site/version
ed_docs/version-0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",9585],"0d927e9a":[()=>n.e(9652).then(n.bind(n,5155)),"@site/versioned_docs/version-0.8.0/LICENSE.md",5155],"0fb5d45b":[()=>n.e(3671).then(n.bind(n,4336)),"@site/docs/guides/Guide_05_Lifespan_Handler.md",4336],"0ff0556c":[()=>n.e(7624).then(n.bind(n,9139)),"@site/versioned_docs/version-0.7.0/api/fastkafka/testing/ApacheKafkaBroker.md",9139],"10df9fdc":[()=>n.e(7368).then(n.bind(n,4261)),"@site/docs/guides/Guide_11_Consumes_Basics.md",4261],"111ae602":[()=>n.e(4379).then(n.bind(n,5371)),"@site/versioned_docs/version-0.5.0/guides/Guide_22_Partition_Keys.md",5371],"1128ab4d":[()=>n.e(8952).then(n.bind(n,1487)),"@site/docs/api/fastkafka/encoder/AvroBase.md",1487],"1187a271":[()=>n.e(5198).then(n.bind(n,8021)),"@site/versioned_docs/version-0.8.0/guides/Guide_23_Batch_Producing.md",8021],"11c86bb5":[()=>n.e(1612).then(n.bind(n,3753)),"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/AvroBase.md",3753],"1244450e":[()=>n.e(5805).then(n.bind(n,8891)),"@site/versioned_docs/version-0.7.0/cli/fastkafka.md",8891],"13bdfbad":[()=>n.e(5896).then(n.bind(n,4784)),"@site/versioned_docs/version-0.8.0/guides/Guide_04_Github_Actions_Workflow.md",4784],"14056c2c":[()=>n.e(6133).then(n.bind(n,9218)),"@site/versioned_docs/version-0.7.0/guides/Guide_01_Intro.md",9218],"14111b0c":[()=>n.e(1856).then(n.bind(n,1462)),"@site/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",1462],"14f7f42b":[()=>n.e(5547).then(n.bind(n,8869)),"@site/versioned_docs/version-0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",8869],"15aa5f44":[()=>n.e(3051).then(n.bind(n,9966)),"@site/versioned_docs/version-0.7.1/cli/run_fastkafka_server_process.md",9966],"15f1310d":[()=>n.e(5997).then(n.bind(n,9692)),"@site/versioned_docs/version-0.7.0/index.md",9692],"1674a630":[()=>n.e(5252).then(n.bind(n,1575)),"@site/versioned_docs/version-0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",1575],"16e87abe":[()=>n.e(
7132).then(n.bind(n,3631)),"@site/versioned_docs/version-0.8.0/api/fastkafka/executors/DynamicTaskExecutor.md",3631],17896441:[()=>Promise.all([n.e(532),n.e(7918)]).then(n.bind(n,8246)),"@theme/DocItem",8246],"1957b43a":[()=>n.e(5684).then(n.bind(n,6494)),"@site/versioned_docs/version-0.7.0/CHANGELOG.md",6494],"196c63a7":[()=>n.e(4404).then(n.bind(n,6869)),"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/json_encoder.md",6869],"1a4e3797":[()=>Promise.all([n.e(532),n.e(7920)]).then(n.bind(n,6675)),"@theme/SearchPage",6675],"1be78505":[()=>Promise.all([n.e(532),n.e(9514)]).then(n.bind(n,9963)),"@theme/DocPage",9963],"1d4d4e46":[()=>n.e(5623).then(n.bind(n,7868)),"@site/versioned_docs/version-0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",7868],"1efdbea1":[()=>n.e(8914).then(n.bind(n,6129)),"@site/versioned_docs/version-0.6.0/LICENSE.md",6129],"1f0a946f":[()=>n.e(3927).then(n.bind(n,8599)),"@site/versioned_docs/version-0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",8599],"1f1765ab":[()=>n.e(3645).then(n.bind(n,6409)),"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/json_encoder.md",6409],"205a719b":[()=>n.e(5414).then(n.bind(n,593)),"@site/versioned_docs/version-0.7.1/api/fastkafka/testing/LocalRedpandaBroker.md",593],"20f8c1fd":[()=>n.e(8215).then(n.bind(n,7918)),"@site/versioned_docs/version-0.7.1/api/fastkafka/KafkaEvent.md",7918],"222e7c49":[()=>n.e(1733).then(n.bind(n,71)),"@site/versioned_docs/version-0.5.0/guides/Guide_00_FastKafka_Demo.md",71],"232ab88c":[()=>n.e(7100).then(n.bind(n,3097)),"@site/versioned_docs/version-0.8.0/guides/Guide_02_First_Steps.md",3097],"23c607c1":[()=>n.e(9840).then(n.bind(n,1025)),"@site/versioned_docs/version-0.7.0/cli/run_fastkafka_server_process.md",1025],"243cddb9":[()=>n.e(1294).then(n.bind(n,5043)),"@site/versioned_docs/version-0.8.0/api/fastkafka/testing/LocalRedpandaBroker.md",5043],"2622e95a":[()=>n.e(5340).then(n.bind(n,7743)),"@site/versioned_docs/version-0.8.0/
api/fastkafka/testing/ApacheKafkaBroker.md",7743],29105828:[()=>n.e(9050).then(n.bind(n,8367)),"@site/versioned_docs/version-0.7.0/guides/Guide_04_Github_Actions_Workflow.md",8367],"2ae68e65":[()=>n.e(88).then(n.bind(n,7193)),"@site/versioned_docs/version-0.5.0/api/fastkafka/FastKafka.md",7193],"2afa602b":[()=>n.e(8653).then(n.bind(n,4721)),"@site/versioned_docs/version-0.5.0/api/fastkafka/KafkaEvent.md",4721],"2b2faa0a":[()=>n.e(5050).then(n.bind(n,2490)),"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/avro_decoder.md",2490],"2bc15a09":[()=>n.e(8908).then(n.bind(n,8690)),"@site/versioned_docs/version-0.7.1/guides/Guide_22_Partition_Keys.md",8690],"2c797d78":[()=>n.e(7011).then(n.bind(n,6128)),"@site/versioned_docs/version-0.6.0/guides/Guide_03_Authentication.md",6128],"2e96a196":[()=>n.e(3623).then(n.bind(n,5413)),"@site/versioned_docs/version-0.8.0/api/fastkafka/executors/SequentialExecutor.md",5413],"2fe15297":[()=>n.e(2111).then(n.t.bind(n,1388,19)),"~docs/default/version-0-7-0-metadata-prop-303.json",1388],"3087bb2d":[()=>n.e(6308).then(n.bind(n,7247)),"@site/versioned_docs/version-0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",7247],"35d7f647":[()=>n.e(8775).then(n.bind(n,4035)),"@site/versioned_docs/version-0.6.0/guides/Guide_23_Batch_Producing.md",4035],"381a15bc":[()=>n.e(7886).then(n.bind(n,4388)),"@site/versioned_docs/version-0.7.1/cli/fastkafka.md",4388],"38a44003":[()=>n.e(8674).then(n.bind(n,4328)),"@site/versioned_docs/version-0.6.0/cli/fastkafka.md",4328],"40415b6c":[()=>n.e(7058).then(n.bind(n,3100)),"@site/docs/guides/Guide_03_Authentication.md",3100],"409b7aa0":[()=>n.e(6623).then(n.bind(n,1359)),"@site/versioned_docs/version-0.5.0/guides/Guide_04_Github_Actions_Workflow.md",1359],"414d4a37":[()=>n.e(7473).then(n.bind(n,5954)),"@site/versioned_docs/version-0.5.0/guides/Guide_11_Consumes_Basics.md",5954],"456c5d82":[()=>n.e(1939).then(n.bind(n,6153)),"@site/versioned_docs/version-0.7.0/guides/Guide_12_Batch_Consum
ing.md",6153],"46d2add0":[()=>n.e(1674).then(n.bind(n,6655)),"@site/versioned_docs/version-0.6.0/api/fastkafka/EventMetadata.md",6655],"478692f7":[()=>n.e(7562).then(n.bind(n,7116)),"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/json_decoder.md",7116],"47ac2e75":[()=>n.e(3684).then(n.bind(n,5232)),"@site/versioned_docs/version-0.5.0/guides/Guide_21_Produces_Basics.md",5232],48199270:[()=>n.e(5041).then(n.bind(n,1730)),"@site/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",1730],"4a00fd3a":[()=>n.e(4559).then(n.bind(n,4835)),"@site/versioned_docs/version-0.8.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",4835],"4a2f1dfa":[()=>n.e(383).then(n.bind(n,4331)),"@site/versioned_docs/version-0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",4331],"4a9e4762":[()=>n.e(1377).then(n.bind(n,5809)),"@site/versioned_docs/version-0.8.0/index.md",5809],"4ace981f":[()=>n.e(6791).then(n.bind(n,4071)),"@site/versioned_docs/version-0.6.0/index.md",4071],"4c4d6ef6":[()=>n.e(2706).then(n.bind(n,2063)),"@site/versioned_docs/version-0.7.1/guides/Guide_21_Produces_Basics.md",2063],"4d11873e":[()=>n.e(904).then(n.bind(n,5732)),"@site/versioned_docs/version-0.7.0/api/fastkafka/FastKafka.md",5732],"4d517c40":[()=>n.e(1195).then(n.bind(n,728)),"@site/docs/api/fastkafka/FastKafka.md",728],"4e5074e6":[()=>n.e(1312).then(n.bind(n,5681)),"@site/versioned_docs/version-0.5.0/api/fastkafka/testing/LocalRedpandaBroker.md",5681],"4f8e8160":[()=>n.e(7683).then(n.bind(n,1337)),"@site/versioned_docs/version-0.6.0/api/fastkafka/testing/ApacheKafkaBroker.md",1337],"514a13f6":[()=>n.e(3800).then(n.bind(n,9685)),"@site/docs/api/fastkafka/testing/ApacheKafkaBroker.md",9685],"516ebbd1":[()=>n.e(4095).then(n.bind(n,5048)),"@site/docs/guides/Guide_01_Intro.md",5048],"5300e879":[()=>n.e(7600).then(n.bind(n,1171)),"@site/versioned_docs/version-0.8.0/guides/Guide_11_Consumes_Basics.md",1171],"5347168a":[()=>n.e(424).then(n.bind(n,4540)),"@site/docs/api/fastkafka/execu
tors/DynamicTaskExecutor.md",4540],"5527e5b7":[()=>n.e(3114).then(n.bind(n,8979)),"@site/versioned_docs/version-0.8.0/api/fastkafka/EventMetadata.md",8979],"5534c352":[()=>n.e(7229).then(n.bind(n,885)),"@site/versioned_docs/version-0.6.0/CHANGELOG.md",885],"5584c47d":[()=>n.e(8119).then(n.bind(n,1164)),"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/avro_decoder.md",1164],"58b4829f":[()=>n.e(721).then(n.bind(n,2693)),"@site/versioned_docs/version-0.8.0/guides/Guide_00_FastKafka_Demo.md",2693],"58f10d9f":[()=>n.e(2493).then(n.t.bind(n,9005,19)),"~docs/default/version-0-6-0-metadata-prop-089.json",9005],"5a11a8c6":[()=>n.e(5746).then(n.bind(n,4321)),"@site/versioned_docs/version-0.7.0/api/fastkafka/executors/SequentialExecutor.md",4321],"5cf0f698":[()=>n.e(4884).then(n.bind(n,2950)),"@site/versioned_docs/version-0.6.0/guides/Guide_22_Partition_Keys.md",2950],"5e9f5e1a":[()=>Promise.resolve().then(n.bind(n,6809)),"@generated/docusaurus.config",6809],"61386b8d":[()=>n.e(5375).then(n.bind(n,7530)),"@site/versioned_docs/version-0.5.0/api/fastkafka/testing/ApacheKafkaBroker.md",7530],"62ff7ec9":[()=>n.e(5625).then(n.bind(n,5233)),"@site/versioned_docs/version-0.6.0/guides/Guide_05_Lifespan_Handler.md",5233],"647303d6":[()=>n.e(554).then(n.bind(n,8292)),"@site/versioned_docs/version-0.7.1/guides/Guide_05_Lifespan_Handler.md",8292],"65ab9689":[()=>n.e(7786).then(n.bind(n,4744)),"@site/versioned_docs/version-0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",4744],"68c835af":[()=>n.e(2e3).then(n.bind(n,3649)),"@site/versioned_docs/version-0.7.0/guides/Guide_21_Produces_Basics.md",3649],"68d54528":[()=>n.e(7881).then(n.bind(n,9231)),"@site/versioned_docs/version-0.7.0/guides/Guide_11_Consumes_Basics.md",9231],"69a9729f":[()=>n.e(1707).then(n.bind(n,1506)),"@site/docs/api/fastkafka/encoder/json_encoder.md",1506],"6af17b1d":[()=>n.e(5045).then(n.bind(n,5276)),"@site/versioned_docs/version-0.7.0/CONTRIBUTING.md",5276],"6b76d411":[()=>n.e(7083).then(
n.bind(n,3613)),"@site/versioned_docs/version-0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",3613],"6c174e6d":[()=>n.e(9511).then(n.bind(n,2174)),"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/avro_encoder.md",2174],"6c450cd6":[()=>n.e(7750).then(n.bind(n,5931)),"@site/versioned_docs/version-0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",5931],"6cafb666":[()=>n.e(7883).then(n.bind(n,9373)),"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/avsc_to_pydantic.md",9373],"6d9c0b04":[()=>n.e(3042).then(n.bind(n,6788)),"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/avsc_to_pydantic.md",6788],"6dbdf8e8":[()=>n.e(1998).then(n.bind(n,3497)),"@site/versioned_docs/version-0.7.1/api/fastkafka/executors/DynamicTaskExecutor.md",3497],"6e7b1bc6":[()=>n.e(4413).then(n.bind(n,9827)),"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/AvroBase.md",9827],"7107eb83":[()=>n.e(4457).then(n.bind(n,6252)),"@site/docs/guides/Guide_22_Partition_Keys.md",6252],"7245ce96":[()=>n.e(92).then(n.bind(n,3004)),"@site/versioned_docs/version-0.7.1/api/fastkafka/FastKafka.md",3004],"74e1ba0d":[()=>n.e(5845).then(n.bind(n,9017)),"@site/versioned_docs/version-0.7.0/guides/Guide_06_Benchmarking_FastKafka.md",9017],"75af10bd":[()=>n.e(1783).then(n.bind(n,6940)),"@site/versioned_docs/version-0.8.0/guides/Guide_21_Produces_Basics.md",6940],"7ae5d564":[()=>n.e(5012).then(n.bind(n,399)),"@site/versioned_docs/version-0.7.0/guides/Guide_00_FastKafka_Demo.md",399],"7b4381d3":[()=>n.e(3696).then(n.bind(n,3658)),"@site/docs/guides/Guide_04_Github_Actions_Workflow.md",3658],"7b589963":[()=>n.e(5628).then(n.bind(n,1295)),"@site/versioned_docs/version-0.6.0/api/fastkafka/testing/LocalRedpandaBroker.md",1295],"80f42d74":[()=>n.e(6527).then(n.bind(n,4725)),"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/avro_decoder.md",4725],"81b6783d":[()=>n.e(8888).then(n.bind(n,7622)),"@site/versioned_docs/version-0.5.0/guides/Guide_31_Using_redpanda_t
o_test_fastkafka.md",7622],"81bf77fc":[()=>n.e(1358).then(n.bind(n,8524)),"@site/versioned_docs/version-0.5.0/api/fastkafka/encoder/avsc_to_pydantic.md",8524],"83ec613f":[()=>n.e(4098).then(n.bind(n,1236)),"@site/docs/cli/fastkafka.md",1236],"847c12c2":[()=>n.e(1790).then(n.bind(n,1155)),"@site/versioned_docs/version-0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",1155],"87b29f85":[()=>n.e(8110).then(n.bind(n,9253)),"@site/versioned_docs/version-0.8.0/cli/run_fastkafka_server_process.md",9253],"87f59f37":[()=>n.e(733).then(n.bind(n,3419)),"@site/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",3419],"8804eadc":[()=>n.e(9498).then(n.bind(n,5131)),"@site/versioned_docs/version-0.7.1/guides/Guide_11_Consumes_Basics.md",5131],"898ba646":[()=>n.e(8279).then(n.bind(n,9502)),"@site/docs/api/fastkafka/encoder/avro_decoder.md",9502],"8ad68633":[()=>n.e(2399).then(n.bind(n,1606)),"@site/docs/LICENSE.md",1606],"8c27608b":[()=>n.e(8851).then(n.bind(n,1457)),"@site/versioned_docs/version-0.8.0/guides/Guide_06_Benchmarking_FastKafka.md",1457],"8d193b98":[()=>n.e(7505).then(n.bind(n,676)),"@site/docs/CONTRIBUTING.md",676],"8ff5d7ba":[()=>n.e(7795).then(n.bind(n,1082)),"@site/versioned_docs/version-0.8.0/CHANGELOG.md",1082],"935f2afb":[()=>n.e(53).then(n.t.bind(n,1109,19)),"~docs/default/version-current-metadata-prop-751.json",1109],"9440fd12":[()=>n.e(1604).then(n.bind(n,2218)),"@site/docs/guides/Guide_00_FastKafka_Demo.md",2218],"94d2eef0":[()=>n.e(9723).then(n.bind(n,4855)),"@site/versioned_docs/version-0.7.1/CHANGELOG.md",4855],"97a352ae":[()=>n.e(5439).then(n.bind(n,2522)),"@site/versioned_docs/version-0.7.1/CONTRIBUTING.md",2522],"980c25d7":[()=>Promise.all([n.e(532),n.e(2473)]).then(n.bind(n,8685)),"@site/src/pages/demo/index.js",8685],"982d0b04":[()=>n.e(8468).then(n.bind(n,3331)),"@site/docs/api/fastkafka/encoder/avsc_to_pydantic.md",3331],"9980ea0e":[()=>n.e(1506).then(n.bind(n,1339)),"@site/versioned_docs/version-0.7.1/guides/Guide_04_Git
hub_Actions_Workflow.md",1339],"99912bf6":[()=>n.e(9469).then(n.bind(n,9036)),"@site/versioned_docs/version-0.6.0/guides/Guide_04_Github_Actions_Workflow.md",9036],"99bfca7e":[()=>n.e(4172).then(n.bind(n,129)),"@site/docs/api/fastkafka/encoder/json_decoder.md",129],"99d969f2":[()=>n.e(6005).then(n.bind(n,9515)),"@site/versioned_docs/version-0.8.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application.md",9515],"9af63d42":[()=>n.e(3814).then(n.bind(n,709)),"@site/versioned_docs/version-0.7.1/guides/Guide_23_Batch_Producing.md",709],"9defa5b7":[()=>n.e(5955).then(n.bind(n,993)),"@site/versioned_docs/version-0.7.1/guides/Guide_01_Intro.md",993],"9fc8d1d9":[()=>n.e(1673).then(n.bind(n,4941)),"@site/versioned_docs/version-0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",4941],"9fda8563":[()=>n.e(7055).then(n.bind(n,7279)),"@site/versioned_docs/version-0.6.0/api/fastkafka/executors/DynamicTaskExecutor.md",7279],a03cde8f:[()=>n.e(4789).then(n.bind(n,7255)),"@site/versioned_docs/version-0.5.0/guides/Guide_01_Intro.md",7255],a07fb1cb:[()=>n.e(5430).then(n.bind(n,8510)),"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/avsc_to_pydantic.md",8510],a17dbf83:[()=>n.e(162).then(n.bind(n,9044)),"@site/versioned_docs/version-0.7.0/api/fastkafka/executors/DynamicTaskExecutor.md",9044],a34ed3b2:[()=>n.e(2777).then(n.bind(n,3749)),"@site/versioned_docs/version-0.8.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",3749],a4055066:[()=>n.e(4018).then(n.bind(n,9328)),"@site/versioned_docs/version-0.7.1/api/fastkafka/testing/ApacheKafkaBroker.md",9328],a4cbee7f:[()=>n.e(5144).then(n.bind(n,501)),"@site/versioned_docs/version-0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",501],a5b090b0:[()=>n.e(4029).then(n.bind(n,8298)),"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/avsc_to_pydantic.md",8298],a624bde7:[()=>n.e(8823).then(n.bind(n,1882)),"@site/versioned_docs/version-0.8.0/guides/Guide_01_Intro.md",1882],a686ca68:[()=>n.e(7639).then(n.bind(n,58)
),"@site/versioned_docs/version-0.6.0/guides/Guide_01_Intro.md",58],a6c229c0:[()=>n.e(7773).then(n.bind(n,4563)),"@site/versioned_docs/version-0.8.0/api/fastkafka/FastKafka.md",4563],a7914a5c:[()=>n.e(4842).then(n.bind(n,2804)),"@site/versioned_docs/version-0.7.1/index.md",2804],a80d168f:[()=>n.e(3725).then(n.bind(n,760)),"@site/versioned_docs/version-0.7.1/guides/Guide_00_FastKafka_Demo.md",760],a9ab9f8f:[()=>n.e(3979).then(n.bind(n,1034)),"@site/versioned_docs/version-0.8.0/guides/Guide_12_Batch_Consuming.md",1034],aa946361:[()=>n.e(5171).then(n.bind(n,5239)),"@site/versioned_docs/version-0.7.0/guides/Guide_23_Batch_Producing.md",5239],aacd1d40:[()=>n.e(5394).then(n.t.bind(n,5745,19)),"/home/runner/work/fastkafka/fastkafka/docusaurus/.docusaurus/docusaurus-plugin-content-pages/default/plugin-route-context-module-100.json",5745],ac02e102:[()=>n.e(9942).then(n.bind(n,3768)),"@site/versioned_docs/version-0.6.0/guides/Guide_00_FastKafka_Demo.md",3768],ae1efb81:[()=>n.e(309).then(n.bind(n,2026)),"@site/versioned_docs/version-0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka.md",2026],b1b6a961:[()=>n.e(4168).then(n.bind(n,7251)),"@site/versioned_docs/version-0.5.0/guides/Guide_05_Lifespan_Handler.md",7251],b24805c2:[()=>n.e(1616).then(n.bind(n,5504)),"@site/versioned_docs/version-0.5.0/CHANGELOG.md",5504],b638c32b:[()=>n.e(4497).then(n.bind(n,4962)),"@site/versioned_docs/version-0.5.0/guides/Guide_03_Authentication.md",4962],b70bee8d:[()=>n.e(608).then(n.bind(n,3428)),"@site/docs/api/fastkafka/EventMetadata.md",3428],b7f60777:[()=>n.e(3679).then(n.bind(n,6066)),"@site/versioned_docs/version-0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",6066],b91921d6:[()=>n.e(8270).then(n.bind(n,3960)),"@site/versioned_docs/version-0.7.0/api/fastkafka/KafkaEvent.md",3960],b9d0db8e:[()=>n.e(9170).then(n.bind(n,9743)),"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/avro_encoder.md",9743],ba3b9f5c:[()=>n.e(4358).then(n.bind(n,9462)),"@site/versioned_docs/version
-0.7.0/api/fastkafka/encoder/avro_encoder.md",9462],ba9d536d:[()=>n.e(1384).then(n.bind(n,2435)),"@site/versioned_docs/version-0.8.0/guides/Guide_03_Authentication.md",2435],be529d37:[()=>n.e(3033).then(n.bind(n,6524)),"@site/versioned_docs/version-0.7.1/api/fastkafka/testing/Tester.md",6524],beaba6c2:[()=>n.e(3023).then(n.bind(n,1566)),"@site/versioned_docs/version-0.6.0/CONTRIBUTING.md",1566],bfac6a8d:[()=>n.e(6064).then(n.bind(n,7227)),"@site/versioned_docs/version-0.5.0/guides/Guide_02_First_Steps.md",7227],c0e3ff8b:[()=>n.e(2368).then(n.bind(n,567)),"@site/versioned_docs/version-0.8.0/api/fastkafka/encoder/AvroBase.md",567],c16f65ec:[()=>n.e(2798).then(n.bind(n,9942)),"@site/versioned_docs/version-0.7.1/guides/Guide_06_Benchmarking_FastKafka.md",9942],c192c597:[()=>n.e(1984).then(n.bind(n,6649)),"@site/versioned_docs/version-0.8.0/CONTRIBUTING.md",6649],c248ee7e:[()=>n.e(8927).then(n.bind(n,7872)),"@site/versioned_docs/version-0.8.0/api/fastkafka/testing/Tester.md",7872],c377a04b:[()=>n.e(6971).then(n.bind(n,1269)),"@site/docs/index.md",1269],c3d488fa:[()=>n.e(3196).then(n.bind(n,4617)),"@site/versioned_docs/version-0.8.0/guides/Guide_31_Using_redpanda_to_test_fastkafka.md",4617],c4a14462:[()=>n.e(1159).then(n.bind(n,4159)),"@site/versioned_docs/version-0.7.0/api/fastkafka/EventMetadata.md",4159],c4f5d8e4:[()=>Promise.all([n.e(532),n.e(1030),n.e(4195)]).then(n.bind(n,9767)),"@site/src/pages/index.js",9767],c602cd44:[()=>n.e(6862).then(n.bind(n,9216)),"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/json_encoder.md",9216],c95b781b:[()=>n.e(9810).then(n.t.bind(n,6280,19)),"~docs/default/version-0-8-0-metadata-prop-466.json",6280],c9eeccbf:[()=>n.e(3747).then(n.bind(n,9709)),"@site/versioned_docs/version-0.7.1/guides/Guide_12_Batch_Consuming.md",9709],ca2bf8a3:[()=>n.e(6704).then(n.t.bind(n,3769,19)),"/home/runner/work/fastkafka/fastkafka/docusaurus/.docusaurus/docusaurus-plugin-content-docs/default/plugin-route-context-module-100.json",3769],ca36df4d:[()
=>n.e(917).then(n.bind(n,2197)),"@site/docs/CHANGELOG.md",2197],cac45e38:[()=>n.e(3111).then(n.bind(n,3570)),"@site/versioned_docs/version-0.5.0/cli/run_fastkafka_server_process.md",3570],cd19d898:[()=>n.e(8565).then(n.bind(n,2386)),"@site/versioned_docs/version-0.5.0/cli/fastkafka.md",2386],cd59f9ef:[()=>n.e(1753).then(n.bind(n,7411)),"@site/versioned_docs/version-0.6.0/api/fastkafka/testing/Tester.md",7411],d0381ee6:[()=>n.e(8120).then(n.bind(n,3347)),"@site/versioned_docs/version-0.8.0/guides/Guide_24_Using_Multiple_Kafka_Clusters.md",3347],d2282d9e:[()=>n.e(4779).then(n.bind(n,9985)),"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/json_decoder.md",9985],d241d4ef:[()=>n.e(5339).then(n.bind(n,232)),"@site/versioned_docs/version-0.6.0/api/fastkafka/executors/SequentialExecutor.md",232],d2af0b95:[()=>n.e(1267).then(n.bind(n,1036)),"@site/versioned_docs/version-0.7.0/guides/Guide_02_First_Steps.md",1036],d2b827bd:[()=>n.e(4874).then(n.bind(n,5407)),"@site/versioned_docs/version-0.7.0/LICENSE.md",5407],d35204c3:[()=>n.e(604).then(n.bind(n,2560)),"@site/versioned_docs/version-0.7.1/guides/Guide_03_Authentication.md",2560],d40fb48f:[()=>n.e(2049).then(n.bind(n,1885)),"@site/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka.md",1885],d67a4111:[()=>n.e(8308).then(n.bind(n,279)),"@site/versioned_docs/version-0.7.0/guides/Guide_05_Lifespan_Handler.md",279],d73efefc:[()=>n.e(2353).then(n.bind(n,3130)),"@site/versioned_docs/version-0.7.0/api/fastkafka/testing/LocalRedpandaBroker.md",3130],d7dfec52:[()=>n.e(8457).then(n.bind(n,6322)),"@site/docs/cli/run_fastkafka_server_process.md",6322],d87f7f29:[()=>n.e(9069).then(n.bind(n,6927)),"@site/docs/guides/Guide_21_Produces_Basics.md",6927],d9bd3427:[()=>n.e(8796).then(n.bind(n,7372)),"@site/versioned_docs/version-0.8.0/guides/Guide_22_Partition_Keys.md",7372],d9ce81b2:[()=>n.e(6803).then(n.bind(n,9681)),"@site/versioned_docs/version-0.5.0/api/fastkafka/testing/Tester.md",9681],dbc0f590:[()=>n.e(5412).then
(n.bind(n,8099)),"@site/versioned_docs/version-0.6.0/cli/run_fastkafka_server_process.md",8099],dc75700c:[()=>n.e(4886).then(n.bind(n,6780)),"@site/versioned_docs/version-0.8.0/cli/fastkafka.md",6780],dde1ff6e:[()=>n.e(6993).then(n.t.bind(n,7085,19)),"/home/runner/work/fastkafka/fastkafka/docusaurus/.docusaurus/docusaurus-theme-search-algolia/default/plugin-route-context-module-100.json",7085],de2621c2:[()=>n.e(298).then(n.bind(n,7082)),"@site/docs/api/fastkafka/executors/SequentialExecutor.md",7082],e109b3ff:[()=>n.e(4039).then(n.bind(n,231)),"@site/versioned_docs/version-0.6.0/guides/Guide_02_First_Steps.md",231],e1584d63:[()=>n.e(8945).then(n.bind(n,680)),"@site/docs/guides/Guide_12_Batch_Consuming.md",680],e323208f:[()=>n.e(9777).then(n.bind(n,3388)),"@site/docs/api/fastkafka/testing/LocalRedpandaBroker.md",3388],e333f535:[()=>n.e(3776).then(n.bind(n,5783)),"@site/docs/api/fastkafka/KafkaEvent.md",5783],e4d0ad4d:[()=>n.e(1122).then(n.t.bind(n,40,19)),"~docs/default/version-0-7-1-metadata-prop-4e7.json",40],e56c502c:[()=>n.e(1202).then(n.bind(n,8988)),"@site/versioned_docs/version-0.7.0/api/fastkafka/encoder/AvroBase.md",8988],e6eb5527:[()=>n.e(6964).then(n.bind(n,9190)),"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/json_decoder.md",9190],e7ab2684:[()=>n.e(6147).then(n.bind(n,1802)),"@site/versioned_docs/version-0.6.0/api/fastkafka/FastKafka.md",1802],e8ae88bc:[()=>n.e(920).then(n.bind(n,4854)),"@site/versioned_docs/version-0.6.0/guides/Guide_06_Benchmarking_FastKafka.md",4854],e968e69e:[()=>n.e(3374).then(n.bind(n,1849)),"@site/docs/guides/Guide_06_Benchmarking_FastKafka.md",1849],e97b3564:[()=>n.e(9724).then(n.bind(n,6232)),"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/avro_decoder.md",6232],ebc40d40:[()=>n.e(984).then(n.bind(n,4118)),"@site/versioned_docs/version-0.7.1/api/fastkafka/executors/SequentialExecutor.md",4118],ee2e0a62:[()=>n.e(836).then(n.bind(n,1950)),"@site/versioned_docs/version-0.7.1/guides/Guide_02_First_Steps.md",1950]
,f2954f34:[()=>n.e(7408).then(n.bind(n,541)),"@site/versioned_docs/version-0.5.0/index.md",541],f2aaa4e5:[()=>n.e(8861).then(n.bind(n,3123)),"@site/versioned_docs/version-0.8.0/api/fastkafka/KafkaEvent.md",3123],f35e2aba:[()=>n.e(3690).then(n.bind(n,4128)),"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/avro_encoder.md",4128],f39642a1:[()=>n.e(9107).then(n.bind(n,795)),"@site/versioned_docs/version-0.6.0/api/fastkafka/encoder/json_decoder.md",795],f7e229b3:[()=>n.e(3331).then(n.bind(n,4494)),"@site/versioned_docs/version-0.6.0/guides/Guide_11_Consumes_Basics.md",4494],f8edae29:[()=>n.e(7710).then(n.bind(n,4586)),"@site/docs/guides/Guide_23_Batch_Producing.md",4586],fb969bb3:[()=>n.e(9571).then(n.bind(n,3911)),"@site/versioned_docs/version-0.7.0/guides/Guide_22_Partition_Keys.md",3911],fc8a86b2:[()=>n.e(8197).then(n.bind(n,333)),"@site/versioned_docs/version-0.5.0/guides/Guide_06_Benchmarking_FastKafka.md",333],fd2e624b:[()=>n.e(9851).then(n.bind(n,9421)),"@site/versioned_docs/version-0.7.0/api/fastkafka/testing/Tester.md",9421],fdc5233c:[()=>n.e(9794).then(n.bind(n,3444)),"@site/versioned_docs/version-0.7.1/api/fastkafka/EventMetadata.md",3444],fe73cc84:[()=>n.e(3251).then(n.bind(n,604)),"@site/versioned_docs/version-0.7.1/api/fastkafka/encoder/json_encoder.md",604],fff0a46d:[()=>n.e(4641).then(n.bind(n,7129)),"@site/versioned_docs/version-0.7.1/LICENSE.md",7129]};function c(e){let{error:t,retry:n,pastDelay:r}=e;return t?a.createElement("div",{style:{textAlign:"center",color:"#fff",backgroundColor:"#fa383e",borderColor:"#fa383e",borderStyle:"solid",borderRadius:"0.25rem",borderWidth:"1px",boxSizing:"border-box",display:"block",padding:"1rem",flex:"0 0 
50%",marginLeft:"25%",marginRight:"25%",marginTop:"5rem",maxWidth:"50%",width:"100%"}},a.createElement("p",null,String(t)),a.createElement("div",null,a.createElement("button",{type:"button",onClick:n},"Retry"))):r?a.createElement("div",{style:{display:"flex",justifyContent:"center",alignItems:"center",height:"100vh"}},a.createElement("svg",{id:"loader",style:{width:128,height:110,position:"absolute",top:"calc(100vh - 64%)"},viewBox:"0 0 45 45",xmlns:"http://www.w3.org/2000/svg",stroke:"#61dafb"},a.createElement("g",{fill:"none",fillRule:"evenodd",transform:"translate(1 1)",strokeWidth:"2"},a.createElement("circle",{cx:"22",cy:"22",r:"6",strokeOpacity:"0"},a.createElement("animate",{attributeName:"r",begin:"1.5s",dur:"3s",values:"6;22",calcMode:"linear",repeatCount:"indefinite"}),a.createElement("animate",{attributeName:"stroke-opacity",begin:"1.5s",dur:"3s",values:"1;0",calcMode:"linear",repeatCount:"indefinite"}),a.createElement("animate",{attributeName:"stroke-width",begin:"1.5s",dur:"3s",values:"2;0",calcMode:"linear",repeatCount:"indefinite"})),a.createElement("circle",{cx:"22",cy:"22",r:"6",strokeOpacity:"0"},a.createElement("animate",{attributeName:"r",begin:"3s",dur:"3s",values:"6;22",calcMode:"linear",repeatCount:"indefinite"}),a.createElement("animate",{attributeName:"stroke-opacity",begin:"3s",dur:"3s",values:"1;0",calcMode:"linear",repeatCount:"indefinite"}),a.createElement("animate",{attributeName:"stroke-width",begin:"3s",dur:"3s",values:"2;0",calcMode:"linear",repeatCount:"indefinite"})),a.createElement("circle",{cx:"22",cy:"22",r:"8"},a.createElement("animate",{attributeName:"r",begin:"0s",dur:"1.5s",values:"6;1;2;3;4;5;6",calcMode:"linear",repeatCount:"indefinite"}))))):null}var u=n(9670),d=n(226);function f(e,t){if("*"===e)return i()({loading:c,loader:()=>n.e(4972).then(n.bind(n,4972)),modules:["@theme/NotFound"],webpack:()=>[4972],render(e,t){const n=e.default;return 
a.createElement(d.z,{value:{plugin:{name:"native",id:"default"}}},a.createElement(n,t))}});const o=s[`${e}-${t}`],f={},p=[],m=[],h=(0,u.Z)(o);return Object.entries(h).forEach((e=>{let[t,n]=e;const a=l[n];a&&(f[t]=a[0],p.push(a[1]),m.push(a[2]))})),i().Map({loading:c,loader:f,modules:p,webpack:()=>m,render(t,n){const i=JSON.parse(JSON.stringify(o));Object.entries(t).forEach((t=>{let[n,a]=t;const r=a.default;if(!r)throw new Error(`The page component at ${e} doesn't have a default export. This makes it impossible to render anything. Consider default-exporting a React component.`);"object"!=typeof r&&"function"!=typeof r||Object.keys(a).filter((e=>"default"!==e)).forEach((e=>{r[e]=a[e]}));let o=i;const s=n.split(".");s.slice(0,-1).forEach((e=>{o=o[e]})),o[s[s.length-1]]=r}));const s=i.__comp;delete i.__comp;const l=i.__context;return delete i.__context,a.createElement(d.z,{value:l},a.createElement(s,(0,r.Z)({},i,n)))}})}const p=[{path:"/demo/",component:f("/demo/","745"),exact:!0},{path:"/search/",component:f("/search/","b54"),exact:!0},{path:"/docs/0.5.0/",component:f("/docs/0.5.0/","1a6"),routes:[{path:"/docs/0.5.0/",component:f("/docs/0.5.0/","f05"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/api/fastkafka/",component:f("/docs/0.5.0/api/fastkafka/","aba"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/",component:f("/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/","cfa"),exact:!0},{path:"/docs/0.5.0/api/fastkafka/KafkaEvent/",component:f("/docs/0.5.0/api/fastkafka/KafkaEvent/","80f"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/",component:f("/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/","cdb"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker/",component:f("/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker/","dd7"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/api/fastkafka/testing/Tester/
",component:f("/docs/0.5.0/api/fastkafka/testing/Tester/","b37"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/CHANGELOG/",component:f("/docs/0.5.0/CHANGELOG/","3a0"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/cli/fastkafka/",component:f("/docs/0.5.0/cli/fastkafka/","aea"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/cli/run_fastkafka_server_process/",component:f("/docs/0.5.0/cli/run_fastkafka_server_process/","35c"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/guides/Guide_00_FastKafka_Demo/",component:f("/docs/0.5.0/guides/Guide_00_FastKafka_Demo/","a1e"),exact:!0},{path:"/docs/0.5.0/guides/Guide_01_Intro/",component:f("/docs/0.5.0/guides/Guide_01_Intro/","a79"),exact:!0},{path:"/docs/0.5.0/guides/Guide_02_First_Steps/",component:f("/docs/0.5.0/guides/Guide_02_First_Steps/","2c5"),exact:!0},{path:"/docs/0.5.0/guides/Guide_03_Authentication/",component:f("/docs/0.5.0/guides/Guide_03_Authentication/","8a7"),exact:!0},{path:"/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow/",component:f("/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow/","d3b"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/guides/Guide_05_Lifespan_Handler/",component:f("/docs/0.5.0/guides/Guide_05_Lifespan_Handler/","e7c"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka/",component:f("/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka/","880"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/",component:f("/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/","b72"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/guides/Guide_11_Consumes_Basics/",component:f("/docs/0.5.0/guides/Guide_11_Consumes_Basics/","683"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/guides/Guide_21_Produces_Basics/",component:f("/docs/0.5.0/guides/Guide_21_Produces_Basics/","cd5"),exact:!0,sidebar:"tutorialSidebar"},{path:
"/docs/0.5.0/guides/Guide_22_Partition_Keys/",component:f("/docs/0.5.0/guides/Guide_22_Partition_Keys/","098"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/",component:f("/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/","693"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/",component:f("/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/","bcc"),exact:!0,sidebar:"tutorialSidebar"}]},{path:"/docs/0.6.0/",component:f("/docs/0.6.0/","8d8"),routes:[{path:"/docs/0.6.0/",component:f("/docs/0.6.0/","22e"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/",component:f("/docs/0.6.0/api/fastkafka/","8f8"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/encoder/avro_decoder/",component:f("/docs/0.6.0/api/fastkafka/encoder/avro_decoder/","54e"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/encoder/avro_encoder/",component:f("/docs/0.6.0/api/fastkafka/encoder/avro_encoder/","837"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/encoder/AvroBase/",component:f("/docs/0.6.0/api/fastkafka/encoder/AvroBase/","535"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/",component:f("/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/","885"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/encoder/json_decoder/",component:f("/docs/0.6.0/api/fastkafka/encoder/json_decoder/","e6b"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/encoder/json_encoder/",component:f("/docs/0.6.0/api/fastkafka/encoder/json_encoder/","e82"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/EventMetadata/",component:f("/docs/0.6.0/api/fastkafka/EventMetadata/","541"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor/",component:f("/docs/0.6
.0/api/fastkafka/executors/DynamicTaskExecutor/","e6e"),exact:!0},{path:"/docs/0.6.0/api/fastkafka/executors/SequentialExecutor/",component:f("/docs/0.6.0/api/fastkafka/executors/SequentialExecutor/","846"),exact:!0},{path:"/docs/0.6.0/api/fastkafka/KafkaEvent/",component:f("/docs/0.6.0/api/fastkafka/KafkaEvent/","acc"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/",component:f("/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/","26d"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker/",component:f("/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker/","f47"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/api/fastkafka/testing/Tester/",component:f("/docs/0.6.0/api/fastkafka/testing/Tester/","21f"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/CHANGELOG/",component:f("/docs/0.6.0/CHANGELOG/","e01"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/cli/fastkafka/",component:f("/docs/0.6.0/cli/fastkafka/","c61"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/cli/run_fastkafka_server_process/",component:f("/docs/0.6.0/cli/run_fastkafka_server_process/","616"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/CONTRIBUTING/",component:f("/docs/0.6.0/CONTRIBUTING/","5d5"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/guides/Guide_00_FastKafka_Demo/",component:f("/docs/0.6.0/guides/Guide_00_FastKafka_Demo/","d41"),exact:!0},{path:"/docs/0.6.0/guides/Guide_01_Intro/",component:f("/docs/0.6.0/guides/Guide_01_Intro/","73d"),exact:!0},{path:"/docs/0.6.0/guides/Guide_02_First_Steps/",component:f("/docs/0.6.0/guides/Guide_02_First_Steps/","956"),exact:!0},{path:"/docs/0.6.0/guides/Guide_03_Authentication/",component:f("/docs/0.6.0/guides/Guide_03_Authentication/","5e3"),exact:!0},{path:"/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow/",component:f("/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow/","3c0"),exact:!0,sidebar:"tutori
alSidebar"},{path:"/docs/0.6.0/guides/Guide_05_Lifespan_Handler/",component:f("/docs/0.6.0/guides/Guide_05_Lifespan_Handler/","3e4"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka/",component:f("/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka/","62b"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/",component:f("/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/","1fd"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/guides/Guide_11_Consumes_Basics/",component:f("/docs/0.6.0/guides/Guide_11_Consumes_Basics/","301"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/guides/Guide_21_Produces_Basics/",component:f("/docs/0.6.0/guides/Guide_21_Produces_Basics/","771"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/guides/Guide_22_Partition_Keys/",component:f("/docs/0.6.0/guides/Guide_22_Partition_Keys/","fc9"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/guides/Guide_23_Batch_Producing/",component:f("/docs/0.6.0/guides/Guide_23_Batch_Producing/","d0a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/",component:f("/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/","093"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/",component:f("/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/","efc"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.6.0/LICENSE/",component:f("/docs/0.6.0/LICENSE/","204"),exact:!0,sidebar:"tutorialSidebar"}]},{path:"/docs/0.7.0/",component:f("/docs/0.7.0/","6a8"),routes:[{path:"/docs/0.7.0/",component:f("/docs/0.7.0/","54f"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/",component:f("/docs/0.7.0/api/fastkafka/","790"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/encoder/avro_decoder/",component:f("
/docs/0.7.0/api/fastkafka/encoder/avro_decoder/","cda"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/encoder/avro_encoder/",component:f("/docs/0.7.0/api/fastkafka/encoder/avro_encoder/","ba0"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/encoder/AvroBase/",component:f("/docs/0.7.0/api/fastkafka/encoder/AvroBase/","8c7"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic/",component:f("/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic/","455"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/encoder/json_decoder/",component:f("/docs/0.7.0/api/fastkafka/encoder/json_decoder/","41f"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/encoder/json_encoder/",component:f("/docs/0.7.0/api/fastkafka/encoder/json_encoder/","810"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/EventMetadata/",component:f("/docs/0.7.0/api/fastkafka/EventMetadata/","841"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor/",component:f("/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor/","1ec"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/executors/SequentialExecutor/",component:f("/docs/0.7.0/api/fastkafka/executors/SequentialExecutor/","5dc"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/KafkaEvent/",component:f("/docs/0.7.0/api/fastkafka/KafkaEvent/","ce6"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker/",component:f("/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker/","771"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker/",component:f("/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker/","164"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/api/fastkafka/testing/Tester/",component:f("/docs/0.7.0/api/fastkafka/testing/Tester/","ece"),
exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/CHANGELOG/",component:f("/docs/0.7.0/CHANGELOG/","14a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/cli/fastkafka/",component:f("/docs/0.7.0/cli/fastkafka/","bed"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/cli/run_fastkafka_server_process/",component:f("/docs/0.7.0/cli/run_fastkafka_server_process/","117"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/CONTRIBUTING/",component:f("/docs/0.7.0/CONTRIBUTING/","61b"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_00_FastKafka_Demo/",component:f("/docs/0.7.0/guides/Guide_00_FastKafka_Demo/","52d"),exact:!0},{path:"/docs/0.7.0/guides/Guide_01_Intro/",component:f("/docs/0.7.0/guides/Guide_01_Intro/","801"),exact:!0},{path:"/docs/0.7.0/guides/Guide_02_First_Steps/",component:f("/docs/0.7.0/guides/Guide_02_First_Steps/","aa1"),exact:!0},{path:"/docs/0.7.0/guides/Guide_03_Authentication/",component:f("/docs/0.7.0/guides/Guide_03_Authentication/","375"),exact:!0},{path:"/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow/",component:f("/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow/","292"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_05_Lifespan_Handler/",component:f("/docs/0.7.0/guides/Guide_05_Lifespan_Handler/","62d"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka/",component:f("/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka/","3c6"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/",component:f("/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/","a34"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_11_Consumes_Basics/",component:f("/docs/0.7.0/guides/Guide_11_Consumes_Basics/","7f0"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_12_Batch_Consuming/",component:f("/docs/0.7.0/guides/Guide_12_Batch_Consuming
/","f88"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_21_Produces_Basics/",component:f("/docs/0.7.0/guides/Guide_21_Produces_Basics/","1ee"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_22_Partition_Keys/",component:f("/docs/0.7.0/guides/Guide_22_Partition_Keys/","9e0"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_23_Batch_Producing/",component:f("/docs/0.7.0/guides/Guide_23_Batch_Producing/","36c"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters/",component:f("/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters/","7d0"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/",component:f("/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/","01a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/",component:f("/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/","764"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/",component:f("/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/","052"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.0/LICENSE/",component:f("/docs/0.7.0/LICENSE/","200"),exact:!0,sidebar:"tutorialSidebar"}]},{path:"/docs/0.7.1/",component:f("/docs/0.7.1/","64f"),routes:[{path:"/docs/0.7.1/",component:f("/docs/0.7.1/","3ad"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/",component:f("/docs/0.7.1/api/fastkafka/","78c"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/encoder/avro_decoder/",component:f("/docs/0.7.1/api/fastkafka/encoder/avro_decoder/","a8b"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/encoder/avro_encoder/",component:f("/docs/0.7.1/api/fastkafka/encoder/avro_encoder/","a81"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.
1/api/fastkafka/encoder/AvroBase/",component:f("/docs/0.7.1/api/fastkafka/encoder/AvroBase/","25b"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/",component:f("/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/","10c"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/encoder/json_decoder/",component:f("/docs/0.7.1/api/fastkafka/encoder/json_decoder/","91e"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/encoder/json_encoder/",component:f("/docs/0.7.1/api/fastkafka/encoder/json_encoder/","920"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/EventMetadata/",component:f("/docs/0.7.1/api/fastkafka/EventMetadata/","e00"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor/",component:f("/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor/","7f7"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/executors/SequentialExecutor/",component:f("/docs/0.7.1/api/fastkafka/executors/SequentialExecutor/","e27"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/KafkaEvent/",component:f("/docs/0.7.1/api/fastkafka/KafkaEvent/","27f"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker/",component:f("/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker/","438"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker/",component:f("/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker/","13a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/api/fastkafka/testing/Tester/",component:f("/docs/0.7.1/api/fastkafka/testing/Tester/","a2a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/CHANGELOG/",component:f("/docs/0.7.1/CHANGELOG/","92e"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/cli/fastkafka/",component:f("/docs/0.7.1/cli/fastkafka/","6d7"),exact:!0,sidebar:"tutorialSidebar"},{pat
h:"/docs/0.7.1/cli/run_fastkafka_server_process/",component:f("/docs/0.7.1/cli/run_fastkafka_server_process/","569"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/CONTRIBUTING/",component:f("/docs/0.7.1/CONTRIBUTING/","a22"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_00_FastKafka_Demo/",component:f("/docs/0.7.1/guides/Guide_00_FastKafka_Demo/","ebb"),exact:!0},{path:"/docs/0.7.1/guides/Guide_01_Intro/",component:f("/docs/0.7.1/guides/Guide_01_Intro/","028"),exact:!0},{path:"/docs/0.7.1/guides/Guide_02_First_Steps/",component:f("/docs/0.7.1/guides/Guide_02_First_Steps/","3a5"),exact:!0},{path:"/docs/0.7.1/guides/Guide_03_Authentication/",component:f("/docs/0.7.1/guides/Guide_03_Authentication/","3f8"),exact:!0},{path:"/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow/",component:f("/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow/","8ed"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_05_Lifespan_Handler/",component:f("/docs/0.7.1/guides/Guide_05_Lifespan_Handler/","1bd"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka/",component:f("/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka/","36f"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/",component:f("/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/","bf5"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_11_Consumes_Basics/",component:f("/docs/0.7.1/guides/Guide_11_Consumes_Basics/","25a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_12_Batch_Consuming/",component:f("/docs/0.7.1/guides/Guide_12_Batch_Consuming/","32a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_21_Produces_Basics/",component:f("/docs/0.7.1/guides/Guide_21_Produces_Basics/","f0c"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_22_Partition_Keys/",component:f("/doc
s/0.7.1/guides/Guide_22_Partition_Keys/","821"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_23_Batch_Producing/",component:f("/docs/0.7.1/guides/Guide_23_Batch_Producing/","243"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters/",component:f("/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters/","545"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka/",component:f("/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka/","359"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka/",component:f("/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka/","228"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/",component:f("/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/","c3b"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/0.7.1/LICENSE/",component:f("/docs/0.7.1/LICENSE/","edd"),exact:!0,sidebar:"tutorialSidebar"}]},{path:"/docs/next/",component:f("/docs/next/","9c0"),routes:[{path:"/docs/next/",component:f("/docs/next/","acf"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/",component:f("/docs/next/api/fastkafka/","323"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/encoder/avro_decoder/",component:f("/docs/next/api/fastkafka/encoder/avro_decoder/","f4a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/encoder/avro_encoder/",component:f("/docs/next/api/fastkafka/encoder/avro_encoder/","332"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/encoder/AvroBase/",component:f("/docs/next/api/fastkafka/encoder/AvroBase/","7fe"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/encoder/avsc_to_pydantic/",component:f("/docs/next/api/fastkafka/encoder/avsc_to_pydantic/","252"),exact:!0,sideba
r:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/encoder/json_decoder/",component:f("/docs/next/api/fastkafka/encoder/json_decoder/","555"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/encoder/json_encoder/",component:f("/docs/next/api/fastkafka/encoder/json_encoder/","11a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/EventMetadata/",component:f("/docs/next/api/fastkafka/EventMetadata/","489"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/executors/DynamicTaskExecutor/",component:f("/docs/next/api/fastkafka/executors/DynamicTaskExecutor/","c05"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/executors/SequentialExecutor/",component:f("/docs/next/api/fastkafka/executors/SequentialExecutor/","93f"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/KafkaEvent/",component:f("/docs/next/api/fastkafka/KafkaEvent/","b8e"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/testing/ApacheKafkaBroker/",component:f("/docs/next/api/fastkafka/testing/ApacheKafkaBroker/","304"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/testing/LocalRedpandaBroker/",component:f("/docs/next/api/fastkafka/testing/LocalRedpandaBroker/","7d3"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/api/fastkafka/testing/Tester/",component:f("/docs/next/api/fastkafka/testing/Tester/","3eb"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/CHANGELOG/",component:f("/docs/next/CHANGELOG/","53a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/cli/fastkafka/",component:f("/docs/next/cli/fastkafka/","552"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/cli/run_fastkafka_server_process/",component:f("/docs/next/cli/run_fastkafka_server_process/","c49"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/CONTRIBUTING/",component:f("/docs/next/CONTRIBUTING/","a7d"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_00_FastKa
fka_Demo/",component:f("/docs/next/guides/Guide_00_FastKafka_Demo/","c8c"),exact:!0},{path:"/docs/next/guides/Guide_01_Intro/",component:f("/docs/next/guides/Guide_01_Intro/","89f"),exact:!0},{path:"/docs/next/guides/Guide_02_First_Steps/",component:f("/docs/next/guides/Guide_02_First_Steps/","bf4"),exact:!0},{path:"/docs/next/guides/Guide_03_Authentication/",component:f("/docs/next/guides/Guide_03_Authentication/","735"),exact:!0},{path:"/docs/next/guides/Guide_04_Github_Actions_Workflow/",component:f("/docs/next/guides/Guide_04_Github_Actions_Workflow/","7fb"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_05_Lifespan_Handler/",component:f("/docs/next/guides/Guide_05_Lifespan_Handler/","256"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_06_Benchmarking_FastKafka/",component:f("/docs/next/guides/Guide_06_Benchmarking_FastKafka/","ee6"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/",component:f("/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/","c00"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_11_Consumes_Basics/",component:f("/docs/next/guides/Guide_11_Consumes_Basics/","376"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_12_Batch_Consuming/",component:f("/docs/next/guides/Guide_12_Batch_Consuming/","2d0"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_21_Produces_Basics/",component:f("/docs/next/guides/Guide_21_Produces_Basics/","8b9"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_22_Partition_Keys/",component:f("/docs/next/guides/Guide_22_Partition_Keys/","26d"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_23_Batch_Producing/",component:f("/docs/next/guides/Guide_23_Batch_Producing/","a58"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters/",component:f("/docs/next/guides/
Guide_24_Using_Multiple_Kafka_Clusters/","fa5"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka/",component:f("/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka/","a93"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka/",component:f("/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka/","630"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/",component:f("/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/","b52"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka/",component:f("/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka/","a1d"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/next/LICENSE/",component:f("/docs/next/LICENSE/","491"),exact:!0,sidebar:"tutorialSidebar"}]},{path:"/docs/",component:f("/docs/","e68"),routes:[{path:"/docs/",component:f("/docs/","2f0"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/",component:f("/docs/api/fastkafka/","209"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/encoder/avro_decoder/",component:f("/docs/api/fastkafka/encoder/avro_decoder/","422"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/encoder/avro_encoder/",component:f("/docs/api/fastkafka/encoder/avro_encoder/","67e"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/encoder/AvroBase/",component:f("/docs/api/fastkafka/encoder/AvroBase/","719"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/encoder/avsc_to_pydantic/",component:f("/docs/api/fastkafka/encoder/avsc_to_pydantic/","2ad"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/encoder/json_decoder/",component:f("/docs/api/fastkafka/encoder/json_decoder/","ba2"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/encoder/json_encoder/",c
omponent:f("/docs/api/fastkafka/encoder/json_encoder/","d66"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/EventMetadata/",component:f("/docs/api/fastkafka/EventMetadata/","e9d"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/executors/DynamicTaskExecutor/",component:f("/docs/api/fastkafka/executors/DynamicTaskExecutor/","200"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/executors/SequentialExecutor/",component:f("/docs/api/fastkafka/executors/SequentialExecutor/","acb"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/KafkaEvent/",component:f("/docs/api/fastkafka/KafkaEvent/","7f5"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/testing/ApacheKafkaBroker/",component:f("/docs/api/fastkafka/testing/ApacheKafkaBroker/","edd"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/testing/LocalRedpandaBroker/",component:f("/docs/api/fastkafka/testing/LocalRedpandaBroker/","dc7"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/api/fastkafka/testing/Tester/",component:f("/docs/api/fastkafka/testing/Tester/","79a"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/CHANGELOG/",component:f("/docs/CHANGELOG/","6a5"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/cli/fastkafka/",component:f("/docs/cli/fastkafka/","632"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/cli/run_fastkafka_server_process/",component:f("/docs/cli/run_fastkafka_server_process/","1ba"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/CONTRIBUTING/",component:f("/docs/CONTRIBUTING/","6c9"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_00_FastKafka_Demo/",component:f("/docs/guides/Guide_00_FastKafka_Demo/","414"),exact:!0},{path:"/docs/guides/Guide_01_Intro/",component:f("/docs/guides/Guide_01_Intro/","3e6"),exact:!0},{path:"/docs/guides/Guide_02_First_Steps/",component:f("/docs/guides/Guide_02_First_Steps/","db7"),exact:!0},{path:"/docs/guides/Guide_03_Authentication/",component:f("/docs/guide
s/Guide_03_Authentication/","6a9"),exact:!0},{path:"/docs/guides/Guide_04_Github_Actions_Workflow/",component:f("/docs/guides/Guide_04_Github_Actions_Workflow/","c61"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_05_Lifespan_Handler/",component:f("/docs/guides/Guide_05_Lifespan_Handler/","bf4"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_06_Benchmarking_FastKafka/",component:f("/docs/guides/Guide_06_Benchmarking_FastKafka/","b72"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/",component:f("/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/","bf6"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_11_Consumes_Basics/",component:f("/docs/guides/Guide_11_Consumes_Basics/","7c1"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_12_Batch_Consuming/",component:f("/docs/guides/Guide_12_Batch_Consuming/","9ac"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_21_Produces_Basics/",component:f("/docs/guides/Guide_21_Produces_Basics/","dd3"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_22_Partition_Keys/",component:f("/docs/guides/Guide_22_Partition_Keys/","61b"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_23_Batch_Producing/",component:f("/docs/guides/Guide_23_Batch_Producing/","83b"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters/",component:f("/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters/","2ef"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka/",component:f("/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka/","8da"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka/",component:f("/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka/","a0f"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/guides/Guide_32_Using_fastapi_to_run_fas
tkafka_application/",component:f("/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/","966"),exact:!0,sidebar:"tutorialSidebar"},{path:"/docs/LICENSE/",component:f("/docs/LICENSE/","a59"),exact:!0,sidebar:"tutorialSidebar"}]},{path:"/",component:f("/","50b"),exact:!0},{path:"*",component:f("*")}]},8934:(e,t,n)=>{"use strict";n.d(t,{_:()=>r,t:()=>o});var a=n(7294);const r=a.createContext(!1);function o(e){let{children:t}=e;const[n,o]=(0,a.useState)(!1);return(0,a.useEffect)((()=>{o(!0)}),[]),a.createElement(r.Provider,{value:n},t)}},9383:(e,t,n)=>{"use strict";var a=n(7294),r=n(3935),o=n(3727),i=n(405),s=n(412);const l=[n(6657),n(2497),n(3310),n(8320),n(2295)];var c=n(723),u=n(6550),d=n(8790);function f(e){let{children:t}=e;return a.createElement(a.Fragment,null,t)}var p=n(7462),m=n(5742),h=n(2263),g=n(4996),_=n(6668),b=n(833),k=n(4711),v=n(9727),y=n(3320),w=n(197);function S(){const{i18n:{defaultLocale:e,localeConfigs:t}}=(0,h.Z)(),n=(0,k.l)();return a.createElement(m.Z,null,Object.entries(t).map((e=>{let[t,{htmlLang:r}]=e;return a.createElement("link",{key:t,rel:"alternate",href:n.createUrl({locale:t,fullyQualified:!0}),hrefLang:r})})),a.createElement("link",{rel:"alternate",href:n.createUrl({locale:e,fullyQualified:!0}),hrefLang:"x-default"}))}function E(e){let{permalink:t}=e;const{siteConfig:{url:n}}=(0,h.Z)(),r=function(){const{siteConfig:{url:e}}=(0,h.Z)(),{pathname:t}=(0,u.TH)();return e+(0,g.Z)(t)}(),o=t?`${n}${t}`:r;return a.createElement(m.Z,null,a.createElement("meta",{property:"og:url",content:o}),a.createElement("link",{rel:"canonical",href:o}))}function x(){const{i18n:{currentLocale:e}}=(0,h.Z)(),{metadata:t,image:n}=(0,_.L)();return 
a.createElement(a.Fragment,null,a.createElement(m.Z,null,a.createElement("meta",{name:"twitter:card",content:"summary_large_image"}),a.createElement("body",{className:v.h})),n&&a.createElement(b.d,{image:n}),a.createElement(E,null),a.createElement(S,null),a.createElement(w.Z,{tag:y.HX,locale:e}),a.createElement(m.Z,null,t.map(((e,t)=>a.createElement("meta",(0,p.Z)({key:t},e))))))}const C=new Map;function T(e){if(C.has(e.pathname))return{...e,pathname:C.get(e.pathname)};if((0,d.f)(c.Z,e.pathname).some((e=>{let{route:t}=e;return!0===t.exact})))return C.set(e.pathname,e.pathname),e;const t=e.pathname.trim().replace(/(?:\/index)?\.html$/,"")||"/";return C.set(e.pathname,t),{...e,pathname:t}}var A=n(8934),G=n(8940);function L(e){for(var t=arguments.length,n=new Array(t>1?t-1:0),a=1;a<t;a++)n[a-1]=arguments[a];const r=l.map((t=>{const a=t.default?.[e]??t[e];return a?.(...n)}));return()=>r.forEach((e=>e?.()))}const N=function(e){let{children:t,location:n,previousLocation:r}=e;return(0,a.useLayoutEffect)((()=>{r!==n&&(!function(e){let{location:t,previousLocation:n}=e;if(!n)return;const a=t.pathname===n.pathname,r=t.hash===n.hash,o=t.search===n.search;if(a&&r&&!o)return;const{hash:i}=t;if(i){const e=decodeURIComponent(i.substring(1)),t=document.getElementById(e);t?.scrollIntoView()}else window.scrollTo(0,0)}({location:n,previousLocation:r}),L("onRouteDidUpdate",{previousLocation:r,location:n}))}),[r,n]),t};function R(e){const t=Array.from(new Set([e,decodeURI(e)])).map((e=>(0,d.f)(c.Z,e))).flat();return Promise.all(t.map((e=>e.route.component.preload?.())))}class P extends a.Component{constructor(e){super(e),this.previousLocation=void 0,this.routeUpdateCleanupCb=void 0,this.previousLocation=null,this.routeUpdateCleanupCb=s.Z.canUseDOM?L("onRouteUpdate",{previousLocation:null,location:this.props.location}):()=>{},this.state={nextRouteHasLoaded:!0}}shouldComponentUpdate(e,t){if(e.location===this.props.location)return t.nextRouteHasLoaded;const n=e.location;return 
this.previousLocation=this.props.location,this.setState({nextRouteHasLoaded:!1}),this.routeUpdateCleanupCb=L("onRouteUpdate",{previousLocation:this.previousLocation,location:n}),R(n.pathname).then((()=>{this.routeUpdateCleanupCb(),this.setState({nextRouteHasLoaded:!0})})).catch((e=>{console.warn(e),window.location.reload()})),!1}render(){const{children:e,location:t}=this.props;return a.createElement(N,{previousLocation:this.previousLocation,location:t},a.createElement(u.AW,{location:t,render:()=>e}))}}const I=P,O="docusaurus-base-url-issue-banner-container",B="docusaurus-base-url-issue-banner",D="docusaurus-base-url-issue-banner-suggestion-container",M="__DOCUSAURUS_INSERT_BASEURL_BANNER";function F(e){return`\nwindow['${M}'] = true;\n\ndocument.addEventListener('DOMContentLoaded', maybeInsertBanner);\n\nfunction maybeInsertBanner() {\n var shouldInsert = window['${M}'];\n shouldInsert && insertBanner();\n}\n\nfunction insertBanner() {\n var bannerContainer = document.getElementById('${O}');\n if (!bannerContainer) {\n return;\n }\n var bannerHtml = ${JSON.stringify(function(e){return`\n<div id="${B}" style="border: thick solid red; background-color: rgb(255, 230, 179); margin: 20px; padding: 20px; font-size: 20px;">\n <p style="font-weight: bold; font-size: 30px;">Your Docusaurus site did not load properly.</p>\n <p>A very common reason is a wrong site <a href="https://docusaurus.io/docs/docusaurus.config.js/#baseUrl" style="font-weight: bold;">baseUrl configuration</a>.</p>\n <p>Current configured baseUrl = <span style="font-weight: bold; color: red;">${e}</span> ${"/"===e?" (default value)":""}</p>\n <p>We suggest trying baseUrl = <span id="${D}" style="font-weight: bold; color: green;"></span></p>\n</div>\n`}(e)).replace(/</g,"\\<")};\n bannerContainer.innerHTML = bannerHtml;\n var suggestionContainer = document.getElementById('${D}');\n var actualHomePagePath = window.location.pathname;\n var suggestedBaseUrl = actualHomePagePath.substr(-1) === '/'\n ? 
actualHomePagePath\n : actualHomePagePath + '/';\n suggestionContainer.innerHTML = suggestedBaseUrl;\n}\n`}function U(){const{siteConfig:{baseUrl:e}}=(0,h.Z)();return(0,a.useLayoutEffect)((()=>{window[M]=!1}),[]),a.createElement(a.Fragment,null,!s.Z.canUseDOM&&a.createElement(m.Z,null,a.createElement("script",null,F(e))),a.createElement("div",{id:O}))}function j(){const{siteConfig:{baseUrl:e,baseUrlIssueBanner:t}}=(0,h.Z)(),{pathname:n}=(0,u.TH)();return t&&n===e?a.createElement(U,null):null}function z(){const{siteConfig:{favicon:e,title:t,noIndex:n},i18n:{currentLocale:r,localeConfigs:o}}=(0,h.Z)(),i=(0,g.Z)(e),{htmlLang:s,direction:l}=o[r];return a.createElement(m.Z,null,a.createElement("html",{lang:s,dir:l}),a.createElement("title",null,t),a.createElement("meta",{property:"og:title",content:t}),a.createElement("meta",{name:"viewport",content:"width=device-width, initial-scale=1.0"}),n&&a.createElement("meta",{name:"robots",content:"noindex, nofollow"}),e&&a.createElement("link",{rel:"icon",href:i}))}var K=n(4763);function $(){const e=(0,d.H)(c.Z),t=(0,u.TH)();return a.createElement(K.Z,null,a.createElement(G.M,null,a.createElement(A.t,null,a.createElement(f,null,a.createElement(z,null),a.createElement(x,null),a.createElement(j,null),a.createElement(I,{location:T(t)},e)))))}var H=n(6887);const q=function(e){try{return document.createElement("link").relList.supports(e)}catch{return!1}}("prefetch")?function(e){return new Promise(((t,n)=>{if("undefined"==typeof document)return void n();const a=document.createElement("link");a.setAttribute("rel","prefetch"),a.setAttribute("href",e),a.onload=()=>t(),a.onerror=()=>n();const r=document.getElementsByTagName("head")[0]??document.getElementsByName("script")[0]?.parentNode;r?.appendChild(a)}))}:function(e){return new Promise(((t,n)=>{const a=new XMLHttpRequest;a.open("GET",e,!0),a.withCredentials=!0,a.onload=()=>{200===a.status?t():n()},a.send(null)}))};var Z=n(9670);const W=new Set,V=new 
Set,Y=()=>navigator.connection?.effectiveType.includes("2g")||navigator.connection?.saveData,Q={prefetch(e){if(!(e=>!Y()&&!V.has(e)&&!W.has(e))(e))return!1;W.add(e);const t=(0,d.f)(c.Z,e).flatMap((e=>{return t=e.route.path,Object.entries(H).filter((e=>{let[n]=e;return n.replace(/-[^-]+$/,"")===t})).flatMap((e=>{let[,t]=e;return Object.values((0,Z.Z)(t))}));var t}));return Promise.all(t.map((e=>{const t=n.gca(e);return t&&!t.includes("undefined")?q(t).catch((()=>{})):Promise.resolve()})))},preload:e=>!!(e=>!Y()&&!V.has(e))(e)&&(V.add(e),R(e))},X=Object.freeze(Q);if(s.Z.canUseDOM){window.docusaurus=X;const e=r.hydrate;R(window.location.pathname).then((()=>{e(a.createElement(i.B6,null,a.createElement(o.VK,null,a.createElement($,null))),document.getElementById("__docusaurus"))}))}},8940:(e,t,n)=>{"use strict";n.d(t,{_:()=>u,M:()=>d});var a=n(7294),r=n(6809);const o=JSON.parse('{"docusaurus-plugin-google-gtag":{"default":{"trackingID":["G-WLMWPELHMB"],"anonymizeIP":false,"id":"default"}},"docusaurus-plugin-content-docs":{"default":{"path":"/docs","versions":[{"name":"current","label":"dev 
\ud83d\udea7","isLast":false,"path":"/docs/next","mainDocId":"index","docs":[{"id":"api/fastkafka/encoder/avro_decoder","path":"/docs/next/api/fastkafka/encoder/avro_decoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avro_encoder","path":"/docs/next/api/fastkafka/encoder/avro_encoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/AvroBase","path":"/docs/next/api/fastkafka/encoder/AvroBase","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avsc_to_pydantic","path":"/docs/next/api/fastkafka/encoder/avsc_to_pydantic","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_decoder","path":"/docs/next/api/fastkafka/encoder/json_decoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_encoder","path":"/docs/next/api/fastkafka/encoder/json_encoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/EventMetadata","path":"/docs/next/api/fastkafka/EventMetadata","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/executors/DynamicTaskExecutor","path":"/docs/next/api/fastkafka/executors/DynamicTaskExecutor","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/executors/SequentialExecutor","path":"/docs/next/api/fastkafka/executors/SequentialExecutor","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/FastKafka","path":"/docs/next/api/fastkafka/","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/KafkaEvent","path":"/docs/next/api/fastkafka/KafkaEvent","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/ApacheKafkaBroker","path":"/docs/next/api/fastkafka/testing/ApacheKafkaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/LocalRedpandaBroker","path":"/docs/next/api/fastkafka/testing/LocalRedpandaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/Tester","path":"/docs/next/api/fastkafka/testing/Tester","sidebar":"tutorialSidebar"},{"id":"CHANGELOG","path":"/docs/next/CHANGELOG","sidebar":"tutorialSidebar"},{"id":"cli/fastkafka","path":"/docs/next/cli/fastkafka","sidebar":"tutorialSidebar"},{"
id":"cli/run_fastkafka_server_process","path":"/docs/next/cli/run_fastkafka_server_process","sidebar":"tutorialSidebar"},{"id":"CONTRIBUTING","path":"/docs/next/CONTRIBUTING","sidebar":"tutorialSidebar"},{"id":"guides/Guide_00_FastKafka_Demo","path":"/docs/next/guides/Guide_00_FastKafka_Demo"},{"id":"guides/Guide_01_Intro","path":"/docs/next/guides/Guide_01_Intro"},{"id":"guides/Guide_02_First_Steps","path":"/docs/next/guides/Guide_02_First_Steps"},{"id":"guides/Guide_03_Authentication","path":"/docs/next/guides/Guide_03_Authentication"},{"id":"guides/Guide_04_Github_Actions_Workflow","path":"/docs/next/guides/Guide_04_Github_Actions_Workflow","sidebar":"tutorialSidebar"},{"id":"guides/Guide_05_Lifespan_Handler","path":"/docs/next/guides/Guide_05_Lifespan_Handler","sidebar":"tutorialSidebar"},{"id":"guides/Guide_06_Benchmarking_FastKafka","path":"/docs/next/guides/Guide_06_Benchmarking_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","path":"/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_11_Consumes_Basics","path":"/docs/next/guides/Guide_11_Consumes_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_12_Batch_Consuming","path":"/docs/next/guides/Guide_12_Batch_Consuming","sidebar":"tutorialSidebar"},{"id":"guides/Guide_21_Produces_Basics","path":"/docs/next/guides/Guide_21_Produces_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_22_Partition_Keys","path":"/docs/next/guides/Guide_22_Partition_Keys","sidebar":"tutorialSidebar"},{"id":"guides/Guide_23_Batch_Producing","path":"/docs/next/guides/Guide_23_Batch_Producing","sidebar":"tutorialSidebar"},{"id":"guides/Guide_24_Using_Multiple_Kafka_Clusters","path":"/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters","sidebar":"tutorialSidebar"},{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","path":"/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka","s
idebar":"tutorialSidebar"},{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","path":"/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application","path":"/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application","sidebar":"tutorialSidebar"},{"id":"guides/Guide_33_Using_Tester_class_to_test_fastkafka","path":"/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka","sidebar":"tutorialSidebar"},{"id":"index","path":"/docs/next/","sidebar":"tutorialSidebar"},{"id":"LICENSE","path":"/docs/next/LICENSE","sidebar":"tutorialSidebar"}],"draftIds":[],"sidebars":{"tutorialSidebar":{"link":{"path":"/docs/next/","label":"index"}}}},{"name":"0.8.0","label":"0.8.0","isLast":true,"path":"/docs","mainDocId":"index","docs":[{"id":"api/fastkafka/encoder/avro_decoder","path":"/docs/api/fastkafka/encoder/avro_decoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avro_encoder","path":"/docs/api/fastkafka/encoder/avro_encoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/AvroBase","path":"/docs/api/fastkafka/encoder/AvroBase","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avsc_to_pydantic","path":"/docs/api/fastkafka/encoder/avsc_to_pydantic","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_decoder","path":"/docs/api/fastkafka/encoder/json_decoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_encoder","path":"/docs/api/fastkafka/encoder/json_encoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/EventMetadata","path":"/docs/api/fastkafka/EventMetadata","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/executors/DynamicTaskExecutor","path":"/docs/api/fastkafka/executors/DynamicTaskExecutor","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/executors/SequentialExecutor","path":"/docs/api/fastkafka/executors/SequentialExecutor","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/FastKafka","path":"/
docs/api/fastkafka/","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/KafkaEvent","path":"/docs/api/fastkafka/KafkaEvent","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/ApacheKafkaBroker","path":"/docs/api/fastkafka/testing/ApacheKafkaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/LocalRedpandaBroker","path":"/docs/api/fastkafka/testing/LocalRedpandaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/Tester","path":"/docs/api/fastkafka/testing/Tester","sidebar":"tutorialSidebar"},{"id":"CHANGELOG","path":"/docs/CHANGELOG","sidebar":"tutorialSidebar"},{"id":"cli/fastkafka","path":"/docs/cli/fastkafka","sidebar":"tutorialSidebar"},{"id":"cli/run_fastkafka_server_process","path":"/docs/cli/run_fastkafka_server_process","sidebar":"tutorialSidebar"},{"id":"CONTRIBUTING","path":"/docs/CONTRIBUTING","sidebar":"tutorialSidebar"},{"id":"guides/Guide_00_FastKafka_Demo","path":"/docs/guides/Guide_00_FastKafka_Demo"},{"id":"guides/Guide_01_Intro","path":"/docs/guides/Guide_01_Intro"},{"id":"guides/Guide_02_First_Steps","path":"/docs/guides/Guide_02_First_Steps"},{"id":"guides/Guide_03_Authentication","path":"/docs/guides/Guide_03_Authentication"},{"id":"guides/Guide_04_Github_Actions_Workflow","path":"/docs/guides/Guide_04_Github_Actions_Workflow","sidebar":"tutorialSidebar"},{"id":"guides/Guide_05_Lifespan_Handler","path":"/docs/guides/Guide_05_Lifespan_Handler","sidebar":"tutorialSidebar"},{"id":"guides/Guide_06_Benchmarking_FastKafka","path":"/docs/guides/Guide_06_Benchmarking_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","path":"/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_11_Consumes_Basics","path":"/docs/guides/Guide_11_Consumes_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_12_Batch_Consuming","path":"/docs/guides/Guide_12_Batch_Consuming","sidebar":"tutorialSidebar"},{"id":"guides/
Guide_21_Produces_Basics","path":"/docs/guides/Guide_21_Produces_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_22_Partition_Keys","path":"/docs/guides/Guide_22_Partition_Keys","sidebar":"tutorialSidebar"},{"id":"guides/Guide_23_Batch_Producing","path":"/docs/guides/Guide_23_Batch_Producing","sidebar":"tutorialSidebar"},{"id":"guides/Guide_24_Using_Multiple_Kafka_Clusters","path":"/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters","sidebar":"tutorialSidebar"},{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","path":"/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","path":"/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application","path":"/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application","sidebar":"tutorialSidebar"},{"id":"index","path":"/docs/","sidebar":"tutorialSidebar"},{"id":"LICENSE","path":"/docs/LICENSE","sidebar":"tutorialSidebar"}],"draftIds":[],"sidebars":{"tutorialSidebar":{"link":{"path":"/docs/","label":"index"}}}},{"name":"0.7.1","label":"0.7.1","isLast":false,"path":"/docs/0.7.1","mainDocId":"index","docs":[{"id":"api/fastkafka/encoder/avro_decoder","path":"/docs/0.7.1/api/fastkafka/encoder/avro_decoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avro_encoder","path":"/docs/0.7.1/api/fastkafka/encoder/avro_encoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/AvroBase","path":"/docs/0.7.1/api/fastkafka/encoder/AvroBase","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avsc_to_pydantic","path":"/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_decoder","path":"/docs/0.7.1/api/fastkafka/encoder/json_decoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_encoder","path":"/docs/0.7.1/api/fastkafka/encoder/json_encoder","s
idebar":"tutorialSidebar"},{"id":"api/fastkafka/EventMetadata","path":"/docs/0.7.1/api/fastkafka/EventMetadata","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/executors/DynamicTaskExecutor","path":"/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/executors/SequentialExecutor","path":"/docs/0.7.1/api/fastkafka/executors/SequentialExecutor","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/FastKafka","path":"/docs/0.7.1/api/fastkafka/","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/KafkaEvent","path":"/docs/0.7.1/api/fastkafka/KafkaEvent","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/ApacheKafkaBroker","path":"/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/LocalRedpandaBroker","path":"/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/Tester","path":"/docs/0.7.1/api/fastkafka/testing/Tester","sidebar":"tutorialSidebar"},{"id":"CHANGELOG","path":"/docs/0.7.1/CHANGELOG","sidebar":"tutorialSidebar"},{"id":"cli/fastkafka","path":"/docs/0.7.1/cli/fastkafka","sidebar":"tutorialSidebar"},{"id":"cli/run_fastkafka_server_process","path":"/docs/0.7.1/cli/run_fastkafka_server_process","sidebar":"tutorialSidebar"},{"id":"CONTRIBUTING","path":"/docs/0.7.1/CONTRIBUTING","sidebar":"tutorialSidebar"},{"id":"guides/Guide_00_FastKafka_Demo","path":"/docs/0.7.1/guides/Guide_00_FastKafka_Demo"},{"id":"guides/Guide_01_Intro","path":"/docs/0.7.1/guides/Guide_01_Intro"},{"id":"guides/Guide_02_First_Steps","path":"/docs/0.7.1/guides/Guide_02_First_Steps"},{"id":"guides/Guide_03_Authentication","path":"/docs/0.7.1/guides/Guide_03_Authentication"},{"id":"guides/Guide_04_Github_Actions_Workflow","path":"/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow","sidebar":"tutorialSidebar"},{"id":"guides/Guide_05_Lifespan_Handler","path":"/docs/0.7.1/guides/Guide_05_Lifespan_Handler","sidebar":"tutorial
Sidebar"},{"id":"guides/Guide_06_Benchmarking_FastKafka","path":"/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","path":"/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_11_Consumes_Basics","path":"/docs/0.7.1/guides/Guide_11_Consumes_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_12_Batch_Consuming","path":"/docs/0.7.1/guides/Guide_12_Batch_Consuming","sidebar":"tutorialSidebar"},{"id":"guides/Guide_21_Produces_Basics","path":"/docs/0.7.1/guides/Guide_21_Produces_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_22_Partition_Keys","path":"/docs/0.7.1/guides/Guide_22_Partition_Keys","sidebar":"tutorialSidebar"},{"id":"guides/Guide_23_Batch_Producing","path":"/docs/0.7.1/guides/Guide_23_Batch_Producing","sidebar":"tutorialSidebar"},{"id":"guides/Guide_24_Using_Multiple_Kafka_Clusters","path":"/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters","sidebar":"tutorialSidebar"},{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","path":"/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","path":"/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application","path":"/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application","sidebar":"tutorialSidebar"},{"id":"index","path":"/docs/0.7.1/","sidebar":"tutorialSidebar"},{"id":"LICENSE","path":"/docs/0.7.1/LICENSE","sidebar":"tutorialSidebar"}],"draftIds":[],"sidebars":{"tutorialSidebar":{"link":{"path":"/docs/0.7.1/","label":"index"}}}},{"name":"0.7.0","label":"0.7.0","isLast":false,"path":"/docs/0.7.0","mainDocId":"index","docs":[{"id":"api/fastkafka/encoder/avro_decoder","path":"/docs/0.7.0/api/fastkafka/encoder/avro_decod
er","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avro_encoder","path":"/docs/0.7.0/api/fastkafka/encoder/avro_encoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/AvroBase","path":"/docs/0.7.0/api/fastkafka/encoder/AvroBase","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avsc_to_pydantic","path":"/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_decoder","path":"/docs/0.7.0/api/fastkafka/encoder/json_decoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_encoder","path":"/docs/0.7.0/api/fastkafka/encoder/json_encoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/EventMetadata","path":"/docs/0.7.0/api/fastkafka/EventMetadata","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/executors/DynamicTaskExecutor","path":"/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/executors/SequentialExecutor","path":"/docs/0.7.0/api/fastkafka/executors/SequentialExecutor","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/FastKafka","path":"/docs/0.7.0/api/fastkafka/","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/KafkaEvent","path":"/docs/0.7.0/api/fastkafka/KafkaEvent","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/ApacheKafkaBroker","path":"/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/LocalRedpandaBroker","path":"/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/Tester","path":"/docs/0.7.0/api/fastkafka/testing/Tester","sidebar":"tutorialSidebar"},{"id":"CHANGELOG","path":"/docs/0.7.0/CHANGELOG","sidebar":"tutorialSidebar"},{"id":"cli/fastkafka","path":"/docs/0.7.0/cli/fastkafka","sidebar":"tutorialSidebar"},{"id":"cli/run_fastkafka_server_process","path":"/docs/0.7.0/cli/run_fastkafka_server_process","sidebar":"tutorialSidebar"},{"id":"CONTRIBUTING","path":"/docs
/0.7.0/CONTRIBUTING","sidebar":"tutorialSidebar"},{"id":"guides/Guide_00_FastKafka_Demo","path":"/docs/0.7.0/guides/Guide_00_FastKafka_Demo"},{"id":"guides/Guide_01_Intro","path":"/docs/0.7.0/guides/Guide_01_Intro"},{"id":"guides/Guide_02_First_Steps","path":"/docs/0.7.0/guides/Guide_02_First_Steps"},{"id":"guides/Guide_03_Authentication","path":"/docs/0.7.0/guides/Guide_03_Authentication"},{"id":"guides/Guide_04_Github_Actions_Workflow","path":"/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow","sidebar":"tutorialSidebar"},{"id":"guides/Guide_05_Lifespan_Handler","path":"/docs/0.7.0/guides/Guide_05_Lifespan_Handler","sidebar":"tutorialSidebar"},{"id":"guides/Guide_06_Benchmarking_FastKafka","path":"/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","path":"/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_11_Consumes_Basics","path":"/docs/0.7.0/guides/Guide_11_Consumes_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_12_Batch_Consuming","path":"/docs/0.7.0/guides/Guide_12_Batch_Consuming","sidebar":"tutorialSidebar"},{"id":"guides/Guide_21_Produces_Basics","path":"/docs/0.7.0/guides/Guide_21_Produces_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_22_Partition_Keys","path":"/docs/0.7.0/guides/Guide_22_Partition_Keys","sidebar":"tutorialSidebar"},{"id":"guides/Guide_23_Batch_Producing","path":"/docs/0.7.0/guides/Guide_23_Batch_Producing","sidebar":"tutorialSidebar"},{"id":"guides/Guide_24_Using_Multiple_Kafka_Clusters","path":"/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters","sidebar":"tutorialSidebar"},{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","path":"/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","path":"/docs/0.7.0/guides/Guide_31_Using_redpanda_to_t
est_fastkafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_32_Using_fastapi_to_run_fastkafka_application","path":"/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application","sidebar":"tutorialSidebar"},{"id":"index","path":"/docs/0.7.0/","sidebar":"tutorialSidebar"},{"id":"LICENSE","path":"/docs/0.7.0/LICENSE","sidebar":"tutorialSidebar"}],"draftIds":[],"sidebars":{"tutorialSidebar":{"link":{"path":"/docs/0.7.0/","label":"index"}}}},{"name":"0.6.0","label":"0.6.0","isLast":false,"path":"/docs/0.6.0","mainDocId":"index","docs":[{"id":"api/fastkafka/encoder/avro_decoder","path":"/docs/0.6.0/api/fastkafka/encoder/avro_decoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avro_encoder","path":"/docs/0.6.0/api/fastkafka/encoder/avro_encoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/AvroBase","path":"/docs/0.6.0/api/fastkafka/encoder/AvroBase","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/avsc_to_pydantic","path":"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_decoder","path":"/docs/0.6.0/api/fastkafka/encoder/json_decoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/encoder/json_encoder","path":"/docs/0.6.0/api/fastkafka/encoder/json_encoder","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/EventMetadata","path":"/docs/0.6.0/api/fastkafka/EventMetadata","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/executors/DynamicTaskExecutor","path":"/docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor"},{"id":"api/fastkafka/executors/SequentialExecutor","path":"/docs/0.6.0/api/fastkafka/executors/SequentialExecutor"},{"id":"api/fastkafka/FastKafka","path":"/docs/0.6.0/api/fastkafka/","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/KafkaEvent","path":"/docs/0.6.0/api/fastkafka/KafkaEvent","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/ApacheKafkaBroker","path":"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker","sidebar":"tutorial
Sidebar"},{"id":"api/fastkafka/testing/LocalRedpandaBroker","path":"/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/Tester","path":"/docs/0.6.0/api/fastkafka/testing/Tester","sidebar":"tutorialSidebar"},{"id":"CHANGELOG","path":"/docs/0.6.0/CHANGELOG","sidebar":"tutorialSidebar"},{"id":"cli/fastkafka","path":"/docs/0.6.0/cli/fastkafka","sidebar":"tutorialSidebar"},{"id":"cli/run_fastkafka_server_process","path":"/docs/0.6.0/cli/run_fastkafka_server_process","sidebar":"tutorialSidebar"},{"id":"CONTRIBUTING","path":"/docs/0.6.0/CONTRIBUTING","sidebar":"tutorialSidebar"},{"id":"guides/Guide_00_FastKafka_Demo","path":"/docs/0.6.0/guides/Guide_00_FastKafka_Demo"},{"id":"guides/Guide_01_Intro","path":"/docs/0.6.0/guides/Guide_01_Intro"},{"id":"guides/Guide_02_First_Steps","path":"/docs/0.6.0/guides/Guide_02_First_Steps"},{"id":"guides/Guide_03_Authentication","path":"/docs/0.6.0/guides/Guide_03_Authentication"},{"id":"guides/Guide_04_Github_Actions_Workflow","path":"/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow","sidebar":"tutorialSidebar"},{"id":"guides/Guide_05_Lifespan_Handler","path":"/docs/0.6.0/guides/Guide_05_Lifespan_Handler","sidebar":"tutorialSidebar"},{"id":"guides/Guide_06_Benchmarking_FastKafka","path":"/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","path":"/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_11_Consumes_Basics","path":"/docs/0.6.0/guides/Guide_11_Consumes_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_21_Produces_Basics","path":"/docs/0.6.0/guides/Guide_21_Produces_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_22_Partition_Keys","path":"/docs/0.6.0/guides/Guide_22_Partition_Keys","sidebar":"tutorialSidebar"},{"id":"guides/Guide_23_Batch_Producing","path":"/docs/0.6.0/guides/Guide
_23_Batch_Producing","sidebar":"tutorialSidebar"},{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","path":"/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","path":"/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka","sidebar":"tutorialSidebar"},{"id":"index","path":"/docs/0.6.0/","sidebar":"tutorialSidebar"},{"id":"LICENSE","path":"/docs/0.6.0/LICENSE","sidebar":"tutorialSidebar"}],"draftIds":[],"sidebars":{"tutorialSidebar":{"link":{"path":"/docs/0.6.0/","label":"index"}}}},{"name":"0.5.0","label":"0.5.0","isLast":false,"path":"/docs/0.5.0","mainDocId":"index","docs":[{"id":"api/fastkafka/encoder/avsc_to_pydantic","path":"/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic"},{"id":"api/fastkafka/FastKafka","path":"/docs/0.5.0/api/fastkafka/","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/KafkaEvent","path":"/docs/0.5.0/api/fastkafka/KafkaEvent","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/ApacheKafkaBroker","path":"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/LocalRedpandaBroker","path":"/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker","sidebar":"tutorialSidebar"},{"id":"api/fastkafka/testing/Tester","path":"/docs/0.5.0/api/fastkafka/testing/Tester","sidebar":"tutorialSidebar"},{"id":"CHANGELOG","path":"/docs/0.5.0/CHANGELOG","sidebar":"tutorialSidebar"},{"id":"cli/fastkafka","path":"/docs/0.5.0/cli/fastkafka","sidebar":"tutorialSidebar"},{"id":"cli/run_fastkafka_server_process","path":"/docs/0.5.0/cli/run_fastkafka_server_process","sidebar":"tutorialSidebar"},{"id":"guides/Guide_00_FastKafka_Demo","path":"/docs/0.5.0/guides/Guide_00_FastKafka_Demo"},{"id":"guides/Guide_01_Intro","path":"/docs/0.5.0/guides/Guide_01_Intro"},{"id":"guides/Guide_02_First_Steps","path":"/docs/0.5.0/guides/Guide_02_First_Steps"},{"id":"guides/Guide_03_Authentication","path":"/docs/0.5.0
/guides/Guide_03_Authentication"},{"id":"guides/Guide_04_Github_Actions_Workflow","path":"/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow","sidebar":"tutorialSidebar"},{"id":"guides/Guide_05_Lifespan_Handler","path":"/docs/0.5.0/guides/Guide_05_Lifespan_Handler","sidebar":"tutorialSidebar"},{"id":"guides/Guide_06_Benchmarking_FastKafka","path":"/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","path":"/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_11_Consumes_Basics","path":"/docs/0.5.0/guides/Guide_11_Consumes_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_21_Produces_Basics","path":"/docs/0.5.0/guides/Guide_21_Produces_Basics","sidebar":"tutorialSidebar"},{"id":"guides/Guide_22_Partition_Keys","path":"/docs/0.5.0/guides/Guide_22_Partition_Keys","sidebar":"tutorialSidebar"},{"id":"guides/Guide_30_Using_docker_to_deploy_fastkafka","path":"/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka","sidebar":"tutorialSidebar"},{"id":"guides/Guide_31_Using_redpanda_to_test_fastkafka","path":"/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka","sidebar":"tutorialSidebar"},{"id":"index","path":"/docs/0.5.0/","sidebar":"tutorialSidebar"}],"draftIds":[],"sidebars":{"tutorialSidebar":{"link":{"path":"/docs/0.5.0/","label":"index"}}}}],"breadcrumbs":true}}}'),i=JSON.parse('{"defaultLocale":"en","locales":["en"],"path":"i18n","currentLocale":"en","localeConfigs":{"en":{"label":"English","direction":"ltr","htmlLang":"en","calendar":"gregory","path":"en"}}}');var s=n(7529);const 
l=JSON.parse('{"docusaurusVersion":"2.4.0","siteVersion":"0.0.0","pluginVersions":{"docusaurus-plugin-content-docs":{"type":"package","name":"@docusaurus/plugin-content-docs","version":"2.4.0"},"docusaurus-plugin-content-blog":{"type":"package","name":"@docusaurus/plugin-content-blog","version":"2.4.0"},"docusaurus-plugin-content-pages":{"type":"package","name":"@docusaurus/plugin-content-pages","version":"2.4.0"},"docusaurus-plugin-google-gtag":{"type":"package","name":"@docusaurus/plugin-google-gtag","version":"2.4.0"},"docusaurus-plugin-sitemap":{"type":"package","name":"@docusaurus/plugin-sitemap","version":"2.4.0"},"docusaurus-theme-classic":{"type":"package","name":"@docusaurus/theme-classic","version":"2.4.0"},"docusaurus-theme-search-algolia":{"type":"package","name":"@docusaurus/theme-search-algolia","version":"2.4.0"}}}'),c={siteConfig:r.default,siteMetadata:l,globalData:o,i18n:i,codeTranslations:s},u=a.createContext(c);function d(e){let{children:t}=e;return a.createElement(u.Provider,{value:c},t)}},4763:(e,t,n)=>{"use strict";n.d(t,{Z:()=>f});var a=n(7294),r=n(412),o=n(5742),i=n(8780),s=n(7452);function l(e){let{error:t,tryAgain:n}=e;return a.createElement("div",{style:{display:"flex",flexDirection:"column",justifyContent:"center",alignItems:"flex-start",minHeight:"100vh",width:"100%",maxWidth:"80ch",fontSize:"20px",margin:"0 auto",padding:"1rem"}},a.createElement("h1",{style:{fontSize:"3rem"}},"This page crashed"),a.createElement("button",{type:"button",onClick:n,style:{margin:"1rem 0",fontSize:"2rem",cursor:"pointer",borderRadius:20,padding:"1rem"}},"Try again"),a.createElement(c,{error:t}))}function c(e){let{error:t}=e;const n=(0,i.getErrorCausalChain)(t).map((e=>e.message)).join("\n\nCause:\n");return a.createElement("p",{style:{whiteSpace:"pre-wrap"}},n)}function u(e){let{error:t,tryAgain:n}=e;return a.createElement(f,{fallback:()=>a.createElement(l,{error:t,tryAgain:n})},a.createElement(o.Z,null,a.createElement("title",null,"Page 
Error")),a.createElement(s.Z,null,a.createElement(l,{error:t,tryAgain:n})))}const d=e=>a.createElement(u,e);class f extends a.Component{constructor(e){super(e),this.state={error:null}}componentDidCatch(e){r.Z.canUseDOM&&this.setState({error:e})}render(){const{children:e}=this.props,{error:t}=this.state;if(t){const e={error:t,tryAgain:()=>this.setState({error:null})};return(this.props.fallback??d)(e)}return e??null}}},412:(e,t,n)=>{"use strict";n.d(t,{Z:()=>r});const a="undefined"!=typeof window&&"document"in window&&"createElement"in window.document,r={canUseDOM:a,canUseEventListeners:a&&("addEventListener"in window||"attachEvent"in window),canUseIntersectionObserver:a&&"IntersectionObserver"in window,canUseViewport:a&&"screen"in window}},5742:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var a=n(7294),r=n(405);function o(e){return a.createElement(r.ql,e)}},9960:(e,t,n)=>{"use strict";n.d(t,{Z:()=>p});var a=n(7462),r=n(7294),o=n(3727),i=n(8780),s=n(2263),l=n(3919),c=n(412);const u=r.createContext({collectLink:()=>{}});var d=n(4996);function f(e,t){let{isNavLink:n,to:f,href:p,activeClassName:m,isActive:h,"data-noBrokenLinkCheck":g,autoAddBaseUrl:_=!0,...b}=e;const{siteConfig:{trailingSlash:k,baseUrl:v}}=(0,s.Z)(),{withBaseUrl:y}=(0,d.C)(),w=(0,r.useContext)(u),S=(0,r.useRef)(null);(0,r.useImperativeHandle)(t,(()=>S.current));const E=f||p;const x=(0,l.Z)(E),C=E?.replace("pathname://","");let T=void 0!==C?(A=C,_&&(e=>e.startsWith("/"))(A)?y(A):A):void 0;var A;T&&x&&(T=(0,i.applyTrailingSlash)(T,{trailingSlash:k,baseUrl:v}));const G=(0,r.useRef)(!1),L=n?o.OL:o.rU,N=c.Z.canUseIntersectionObserver,R=(0,r.useRef)(),P=()=>{G.current||null==T||(window.docusaurus.preload(T),G.current=!0)};(0,r.useEffect)((()=>(!N&&x&&null!=T&&window.docusaurus.prefetch(T),()=>{N&&R.current&&R.current.disconnect()})),[R,T,N,x]);const I=T?.startsWith("#")??!1,O=!T||!x||I;return O||g||w.collectLink(T),O?r.createElement("a",(0,a.Z)({ref:S,href:T},E&&!x&&{target:"_blank",rel:"noopener 
noreferrer"},b)):r.createElement(L,(0,a.Z)({},b,{onMouseEnter:P,onTouchStart:P,innerRef:e=>{S.current=e,N&&e&&x&&(R.current=new window.IntersectionObserver((t=>{t.forEach((t=>{e===t.target&&(t.isIntersecting||t.intersectionRatio>0)&&(R.current.unobserve(e),R.current.disconnect(),null!=T&&window.docusaurus.prefetch(T))}))})),R.current.observe(e))},to:T},n&&{isActive:h,activeClassName:m}))}const p=r.forwardRef(f)},5999:(e,t,n)=>{"use strict";n.d(t,{Z:()=>l,I:()=>s});var a=n(7294);function r(e,t){const n=e.split(/(\{\w+\})/).map(((e,n)=>{if(n%2==1){const n=t?.[e.slice(1,-1)];if(void 0!==n)return n}return e}));return n.some((e=>(0,a.isValidElement)(e)))?n.map(((e,t)=>(0,a.isValidElement)(e)?a.cloneElement(e,{key:t}):e)).filter((e=>""!==e)):n.join("")}var o=n(7529);function i(e){let{id:t,message:n}=e;if(void 0===t&&void 0===n)throw new Error("Docusaurus translation declarations must have at least a translation id or a default translation message");return o[t??n]??n??t}function s(e,t){let{message:n,id:a}=e;return r(i({message:n,id:a}),t)}function l(e){let{children:t,id:n,values:o}=e;if(t&&"string"!=typeof t)throw console.warn("Illegal <Translate> children",t),new Error("The Docusaurus <Translate> component only accept simple string values");const s=i({message:t,id:n});return a.createElement(a.Fragment,null,r(s,o))}},9935:(e,t,n)=>{"use strict";n.d(t,{m:()=>a});const a="default"},3919:(e,t,n)=>{"use strict";function a(e){return/^(?:\w*:|\/\/)/.test(e)}function r(e){return void 0!==e&&!a(e)}n.d(t,{Z:()=>r,b:()=>a})},4996:(e,t,n)=>{"use strict";n.d(t,{C:()=>i,Z:()=>s});var a=n(7294),r=n(2263),o=n(3919);function i(){const{siteConfig:{baseUrl:e,url:t}}=(0,r.Z)(),n=(0,a.useCallback)(((n,a)=>function(e,t,n,a){let{forcePrependBaseUrl:r=!1,absolute:i=!1}=void 0===a?{}:a;if(!n||n.startsWith("#")||(0,o.b)(n))return n;if(r)return t+n.replace(/^\//,"");if(n===t.replace(/\/$/,""))return t;const s=n.startsWith(t)?n:t+n.replace(/^\//,"");return 
i?e+s:s}(t,e,n,a)),[t,e]);return{withBaseUrl:n}}function s(e,t){void 0===t&&(t={});const{withBaseUrl:n}=i();return n(e,t)}},2263:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var a=n(7294),r=n(8940);function o(){return(0,a.useContext)(r._)}},2389:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var a=n(7294),r=n(8934);function o(){return(0,a.useContext)(r._)}},9670:(e,t,n)=>{"use strict";n.d(t,{Z:()=>r});const a=e=>"object"==typeof e&&!!e&&Object.keys(e).length>0;function r(e){const t={};return function e(n,r){Object.entries(n).forEach((n=>{let[o,i]=n;const s=r?`${r}.${o}`:o;a(i)?e(i,s):t[s]=i}))}(e),t}},226:(e,t,n)=>{"use strict";n.d(t,{_:()=>r,z:()=>o});var a=n(7294);const r=a.createContext(null);function o(e){let{children:t,value:n}=e;const o=a.useContext(r),i=(0,a.useMemo)((()=>function(e){let{parent:t,value:n}=e;if(!t){if(!n)throw new Error("Unexpected: no Docusaurus route context found");if(!("plugin"in n))throw new Error("Unexpected: Docusaurus topmost route context has no `plugin` attribute");return n}const a={...t.data,...n?.data};return{plugin:t.plugin,data:a}}({parent:o,value:n})),[o,n]);return a.createElement(r.Provider,{value:i},t)}},143:(e,t,n)=>{"use strict";n.d(t,{Iw:()=>_,gA:()=>p,WS:()=>m,_r:()=>d,Jo:()=>b,zh:()=>f,yW:()=>g,gB:()=>h});var a=n(6550),r=n(2263),o=n(9935);function i(e,t){void 0===t&&(t={});const n=function(){const{globalData:e}=(0,r.Z)();return e}()[e];if(!n&&t.failfast)throw new Error(`Docusaurus plugin global data not found for "${e}" plugin.`);return n}const s=e=>e.versions.find((e=>e.isLast));function l(e,t){const n=s(e);return[...e.versions.filter((e=>e!==n)),n].find((e=>!!(0,a.LX)(t,{path:e.path,exact:!1,strict:!1})))}function c(e,t){const n=l(e,t),r=n?.docs.find((e=>!!(0,a.LX)(t,{path:e.path,exact:!0,strict:!1})));return{activeVersion:n,activeDoc:r,alternateDocVersions:r?function(t){const n={};return e.versions.forEach((e=>{e.docs.forEach((a=>{a.id===t&&(n[e.name]=a)}))})),n}(r.id):{}}}const 
u={},d=()=>i("docusaurus-plugin-content-docs")??u,f=e=>function(e,t,n){void 0===t&&(t=o.m),void 0===n&&(n={});const a=i(e),r=a?.[t];if(!r&&n.failfast)throw new Error(`Docusaurus plugin global data not found for "${e}" plugin with id "${t}".`);return r}("docusaurus-plugin-content-docs",e,{failfast:!0});function p(e){void 0===e&&(e={});const t=d(),{pathname:n}=(0,a.TH)();return function(e,t,n){void 0===n&&(n={});const r=Object.entries(e).sort(((e,t)=>t[1].path.localeCompare(e[1].path))).find((e=>{let[,n]=e;return!!(0,a.LX)(t,{path:n.path,exact:!1,strict:!1})})),o=r?{pluginId:r[0],pluginData:r[1]}:void 0;if(!o&&n.failfast)throw new Error(`Can't find active docs plugin for "${t}" pathname, while it was expected to be found. Maybe you tried to use a docs feature that can only be used on a docs-related page? Existing docs plugin paths are: ${Object.values(e).map((e=>e.path)).join(", ")}`);return o}(t,n,e)}function m(e){void 0===e&&(e={});const t=p(e),{pathname:n}=(0,a.TH)();if(!t)return;return{activePlugin:t,activeVersion:l(t.pluginData,n)}}function h(e){return f(e).versions}function g(e){const t=f(e);return s(t)}function _(e){const t=f(e),{pathname:n}=(0,a.TH)();return c(t,n)}function b(e){const t=f(e),{pathname:n}=(0,a.TH)();return function(e,t){const n=s(e);return{latestDocSuggestion:c(e,t).alternateDocVersions[n.name],latestVersionSuggestion:n}}(t,n)}},6657:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>a});const a={onRouteDidUpdate(e){let{location:t,previousLocation:n}=e;!n||t.pathname===n.pathname&&t.search===n.search&&t.hash===n.hash||setTimeout((()=>{window.gtag("event","page_view",{page_title:document.title,page_location:window.location.href,page_path:t.pathname+t.search+t.hash})}))}}},8320:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>o});var a=n(4865),r=n.n(a);r().configure({showSpinner:!1});const o={onRouteUpdate(e){let{location:t,previousLocation:n}=e;if(n&&t.pathname!==n.pathname){const 
e=window.setTimeout((()=>{r().start()}),200);return()=>window.clearTimeout(e)}},onRouteDidUpdate(){r().done()}}},3310:(e,t,n)=>{"use strict";n.r(t);var a=n(7410),r=n(6809);!function(e){const{themeConfig:{prism:t}}=r.default,{additionalLanguages:a}=t;globalThis.Prism=e,a.forEach((e=>{n(6726)(`./prism-${e}`)})),delete globalThis.Prism}(a.Z)},9471:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var a=n(7294);const r={iconExternalLink:"iconExternalLink_nPIU"};function o(e){let{width:t=13.5,height:n=13.5}=e;return a.createElement("svg",{width:t,height:n,"aria-hidden":"true",viewBox:"0 0 24 24",className:r.iconExternalLink},a.createElement("path",{fill:"currentColor",d:"M21 13v10h-21v-19h12v2h-10v15h17v-8h2zm3-12h-10.988l4.035 4-6.977 7.07 2.828 2.828 6.977-7.07 4.125 4.172v-11z"}))}},7452:(e,t,n)=>{"use strict";n.d(t,{Z:()=>Lt});var a=n(7294),r=n(6010),o=n(4763),i=n(833),s=n(7462),l=n(6550),c=n(5999),u=n(5936);const d="docusaurus_skipToContent_fallback";function f(e){e.setAttribute("tabindex","-1"),e.focus(),e.removeAttribute("tabindex")}function p(){const e=(0,a.useRef)(null),{action:t}=(0,l.k6)(),n=(0,a.useCallback)((e=>{e.preventDefault();const t=document.querySelector("main:first-of-type")??document.getElementById(d);t&&f(t)}),[]);return(0,u.S)((n=>{let{location:a}=n;e.current&&!a.hash&&"PUSH"===t&&f(e.current)})),{containerRef:e,onClick:n}}const m=(0,c.I)({id:"theme.common.skipToMainContent",description:"The skip to content label used for accessibility, allowing to rapidly navigate to main content with keyboard tab/enter navigation",message:"Skip to main content"});function h(e){const t=e.children??m,{containerRef:n,onClick:r}=p();return a.createElement("div",{ref:n,role:"region","aria-label":m},a.createElement("a",(0,s.Z)({},e,{href:`#${d}`,onClick:r}),t))}var g=n(5281),_=n(9727);const b={skipToContent:"skipToContent_fXgn"};function k(){return a.createElement(h,{className:b.skipToContent})}var v=n(6668),y=n(9689);function 
w(e){let{width:t=21,height:n=21,color:r="currentColor",strokeWidth:o=1.2,className:i,...l}=e;return a.createElement("svg",(0,s.Z)({viewBox:"0 0 15 15",width:t,height:n},l),a.createElement("g",{stroke:r,strokeWidth:o},a.createElement("path",{d:"M.75.75l13.5 13.5M14.25.75L.75 14.25"})))}const S={closeButton:"closeButton_CVFx"};function E(e){return a.createElement("button",(0,s.Z)({type:"button","aria-label":(0,c.I)({id:"theme.AnnouncementBar.closeButtonAriaLabel",message:"Close",description:"The ARIA label for close button of announcement bar"})},e,{className:(0,r.Z)("clean-btn close",S.closeButton,e.className)}),a.createElement(w,{width:14,height:14,strokeWidth:3.1}))}const x={content:"content_knG7"};function C(e){const{announcementBar:t}=(0,v.L)(),{content:n}=t;return a.createElement("div",(0,s.Z)({},e,{className:(0,r.Z)(x.content,e.className),dangerouslySetInnerHTML:{__html:n}}))}const T={announcementBar:"announcementBar_mb4j",announcementBarPlaceholder:"announcementBarPlaceholder_vyr4",announcementBarClose:"announcementBarClose_gvF7",announcementBarContent:"announcementBarContent_xLdY"};function A(){const{announcementBar:e}=(0,v.L)(),{isActive:t,close:n}=(0,y.nT)();if(!t)return null;const{backgroundColor:r,textColor:o,isCloseable:i}=e;return a.createElement("div",{className:T.announcementBar,style:{backgroundColor:r,color:o},role:"banner"},i&&a.createElement("div",{className:T.announcementBarPlaceholder}),a.createElement(C,{className:T.announcementBarContent}),i&&a.createElement(E,{onClick:n,className:T.announcementBarClose}))}var G=n(3163),L=n(2466);var N=n(902),R=n(3102);const P=a.createContext(null);function I(e){let{children:t}=e;const n=function(){const e=(0,G.e)(),t=(0,R.HY)(),[n,r]=(0,a.useState)(!1),o=null!==t.component,i=(0,N.D9)(o);return(0,a.useEffect)((()=>{o&&!i&&r(!0)}),[o,i]),(0,a.useEffect)((()=>{o?e.shown||r(!0):r(!1)}),[e.shown,o]),(0,a.useMemo)((()=>[n,r]),[n])}();return a.createElement(P.Provider,{value:n},t)}function 
O(e){if(e.component){const t=e.component;return a.createElement(t,e.props)}}function B(){const e=(0,a.useContext)(P);if(!e)throw new N.i6("NavbarSecondaryMenuDisplayProvider");const[t,n]=e,r=(0,a.useCallback)((()=>n(!1)),[n]),o=(0,R.HY)();return(0,a.useMemo)((()=>({shown:t,hide:r,content:O(o)})),[r,o,t])}function D(e){let{header:t,primaryMenu:n,secondaryMenu:o}=e;const{shown:i}=B();return a.createElement("div",{className:"navbar-sidebar"},t,a.createElement("div",{className:(0,r.Z)("navbar-sidebar__items",{"navbar-sidebar__items--show-secondary":i})},a.createElement("div",{className:"navbar-sidebar__item menu"},n),a.createElement("div",{className:"navbar-sidebar__item menu"},o)))}var M=n(2949),F=n(2389);function U(e){return a.createElement("svg",(0,s.Z)({viewBox:"0 0 24 24",width:24,height:24},e),a.createElement("path",{fill:"currentColor",d:"M12,9c1.65,0,3,1.35,3,3s-1.35,3-3,3s-3-1.35-3-3S10.35,9,12,9 M12,7c-2.76,0-5,2.24-5,5s2.24,5,5,5s5-2.24,5-5 S14.76,7,12,7L12,7z M2,13l2,0c0.55,0,1-0.45,1-1s-0.45-1-1-1l-2,0c-0.55,0-1,0.45-1,1S1.45,13,2,13z M20,13l2,0c0.55,0,1-0.45,1-1 s-0.45-1-1-1l-2,0c-0.55,0-1,0.45-1,1S19.45,13,20,13z M11,2v2c0,0.55,0.45,1,1,1s1-0.45,1-1V2c0-0.55-0.45-1-1-1S11,1.45,11,2z M11,20v2c0,0.55,0.45,1,1,1s1-0.45,1-1v-2c0-0.55-0.45-1-1-1C11.45,19,11,19.45,11,20z M5.99,4.58c-0.39-0.39-1.03-0.39-1.41,0 c-0.39,0.39-0.39,1.03,0,1.41l1.06,1.06c0.39,0.39,1.03,0.39,1.41,0s0.39-1.03,0-1.41L5.99,4.58z M18.36,16.95 c-0.39-0.39-1.03-0.39-1.41,0c-0.39,0.39-0.39,1.03,0,1.41l1.06,1.06c0.39,0.39,1.03,0.39,1.41,0c0.39-0.39,0.39-1.03,0-1.41 L18.36,16.95z M19.42,5.99c0.39-0.39,0.39-1.03,0-1.41c-0.39-0.39-1.03-0.39-1.41,0l-1.06,1.06c-0.39,0.39-0.39,1.03,0,1.41 s1.03,0.39,1.41,0L19.42,5.99z M7.05,18.36c0.39-0.39,0.39-1.03,0-1.41c-0.39-0.39-1.03-0.39-1.41,0l-1.06,1.06 c-0.39,0.39-0.39,1.03,0,1.41s1.03,0.39,1.41,0L7.05,18.36z"}))}function j(e){return a.createElement("svg",(0,s.Z)({viewBox:"0 0 24 
24",width:24,height:24},e),a.createElement("path",{fill:"currentColor",d:"M9.37,5.51C9.19,6.15,9.1,6.82,9.1,7.5c0,4.08,3.32,7.4,7.4,7.4c0.68,0,1.35-0.09,1.99-0.27C17.45,17.19,14.93,19,12,19 c-3.86,0-7-3.14-7-7C5,9.07,6.81,6.55,9.37,5.51z M12,3c-4.97,0-9,4.03-9,9s4.03,9,9,9s9-4.03,9-9c0-0.46-0.04-0.92-0.1-1.36 c-0.98,1.37-2.58,2.26-4.4,2.26c-2.98,0-5.4-2.42-5.4-5.4c0-1.81,0.89-3.42,2.26-4.4C12.92,3.04,12.46,3,12,3L12,3z"}))}const z={toggle:"toggle_vylO",toggleButton:"toggleButton_gllP",darkToggleIcon:"darkToggleIcon_wfgR",lightToggleIcon:"lightToggleIcon_pyhR",toggleButtonDisabled:"toggleButtonDisabled_aARS"};function K(e){let{className:t,buttonClassName:n,value:o,onChange:i}=e;const s=(0,F.Z)(),l=(0,c.I)({message:"Switch between dark and light mode (currently {mode})",id:"theme.colorToggle.ariaLabel",description:"The ARIA label for the navbar color mode toggle"},{mode:"dark"===o?(0,c.I)({message:"dark mode",id:"theme.colorToggle.ariaLabel.mode.dark",description:"The name for the dark color mode"}):(0,c.I)({message:"light mode",id:"theme.colorToggle.ariaLabel.mode.light",description:"The name for the light color mode"})});return a.createElement("div",{className:(0,r.Z)(z.toggle,t)},a.createElement("button",{className:(0,r.Z)("clean-btn",z.toggleButton,!s&&z.toggleButtonDisabled,n),type:"button",onClick:()=>i("dark"===o?"light":"dark"),disabled:!s,title:l,"aria-label":l,"aria-live":"polite"},a.createElement(U,{className:(0,r.Z)(z.toggleIcon,z.lightToggleIcon)}),a.createElement(j,{className:(0,r.Z)(z.toggleIcon,z.darkToggleIcon)})))}const $=a.memo(K),H={darkNavbarColorModeToggle:"darkNavbarColorModeToggle_X3D1"};function q(e){let{className:t}=e;const n=(0,v.L)().navbar.style,r=(0,v.L)().colorMode.disableSwitch,{colorMode:o,setColorMode:i}=(0,M.I)();return r?null:a.createElement($,{className:t,buttonClassName:"dark"===n?H.darkNavbarColorModeToggle:void 0,value:o,onChange:i})}var Z=n(1327);function W(){return 
a.createElement(Z.Z,{className:"navbar__brand",imageClassName:"navbar__logo",titleClassName:"navbar__title text--truncate"})}function V(){const e=(0,G.e)();return a.createElement("button",{type:"button","aria-label":(0,c.I)({id:"theme.docs.sidebar.closeSidebarButtonAriaLabel",message:"Close navigation bar",description:"The ARIA label for close button of mobile sidebar"}),className:"clean-btn navbar-sidebar__close",onClick:()=>e.toggle()},a.createElement(w,{color:"var(--ifm-color-emphasis-600)"}))}function Y(){return a.createElement("div",{className:"navbar-sidebar__brand"},a.createElement(W,null),a.createElement(q,{className:"margin-right--md"}),a.createElement(V,null))}var Q=n(9960),X=n(4996),J=n(3919),ee=n(8022),te=n(9471);function ne(e){let{activeBasePath:t,activeBaseRegex:n,to:r,href:o,label:i,html:l,isDropdownLink:c,prependBaseUrlToHref:u,...d}=e;const f=(0,X.Z)(r),p=(0,X.Z)(t),m=(0,X.Z)(o,{forcePrependBaseUrl:!0}),h=i&&o&&!(0,J.Z)(o),g=l?{dangerouslySetInnerHTML:{__html:l}}:{children:a.createElement(a.Fragment,null,i,h&&a.createElement(te.Z,c&&{width:12,height:12}))};return o?a.createElement(Q.Z,(0,s.Z)({href:u?m:o},d,g)):a.createElement(Q.Z,(0,s.Z)({to:f,isNavLink:!0},(t||n)&&{isActive:(e,t)=>n?(0,ee.F)(n,t.pathname):t.pathname.startsWith(p)},d,g))}function ae(e){let{className:t,isDropdownItem:n=!1,...o}=e;const i=a.createElement(ne,(0,s.Z)({className:(0,r.Z)(n?"dropdown__link":"navbar__item navbar__link",t),isDropdownLink:n},o));return n?a.createElement("li",null,i):i}function re(e){let{className:t,isDropdownItem:n,...o}=e;return a.createElement("li",{className:"menu__list-item"},a.createElement(ne,(0,s.Z)({className:(0,r.Z)("menu__link",t)},o)))}function oe(e){let{mobile:t=!1,position:n,...r}=e;const o=t?re:ae;return a.createElement(o,(0,s.Z)({},r,{activeClassName:r.activeClassName??(t?"menu__link--active":"navbar__link--active")}))}var ie=n(6043),se=n(8596),le=n(2263);function ce(e,t){return 
e.some((e=>function(e,t){return!!(0,se.Mg)(e.to,t)||!!(0,ee.F)(e.activeBaseRegex,t)||!(!e.activeBasePath||!t.startsWith(e.activeBasePath))}(e,t)))}function ue(e){let{items:t,position:n,className:o,onClick:i,...l}=e;const c=(0,a.useRef)(null),[u,d]=(0,a.useState)(!1);return(0,a.useEffect)((()=>{const e=e=>{c.current&&!c.current.contains(e.target)&&d(!1)};return document.addEventListener("mousedown",e),document.addEventListener("touchstart",e),document.addEventListener("focusin",e),()=>{document.removeEventListener("mousedown",e),document.removeEventListener("touchstart",e),document.removeEventListener("focusin",e)}}),[c]),a.createElement("div",{ref:c,className:(0,r.Z)("navbar__item","dropdown","dropdown--hoverable",{"dropdown--right":"right"===n,"dropdown--show":u})},a.createElement(ne,(0,s.Z)({"aria-haspopup":"true","aria-expanded":u,role:"button",href:l.to?void 0:"#",className:(0,r.Z)("navbar__link",o)},l,{onClick:l.to?void 0:e=>e.preventDefault(),onKeyDown:e=>{"Enter"===e.key&&(e.preventDefault(),d(!u))}}),l.children??l.label),a.createElement("ul",{className:"dropdown__menu"},t.map(((e,t)=>a.createElement(Ke,(0,s.Z)({isDropdownItem:!0,activeClassName:"dropdown__link--active"},e,{key:t}))))))}function de(e){let{items:t,className:n,position:o,onClick:i,...c}=e;const u=function(){const{siteConfig:{baseUrl:e}}=(0,le.Z)(),{pathname:t}=(0,l.TH)();return t.replace(e,"/")}(),d=ce(t,u),{collapsed:f,toggleCollapsed:p,setCollapsed:m}=(0,ie.u)({initialState:()=>!d});return(0,a.useEffect)((()=>{d&&m(!d)}),[u,d,m]),a.createElement("li",{className:(0,r.Z)("menu__list-item",{"menu__list-item--collapsed":f})},a.createElement(ne,(0,s.Z)({role:"button",className:(0,r.Z)("menu__link menu__link--sublist 
menu__link--sublist-caret",n)},c,{onClick:e=>{e.preventDefault(),p()}}),c.children??c.label),a.createElement(ie.z,{lazy:!0,as:"ul",className:"menu__list",collapsed:f},t.map(((e,t)=>a.createElement(Ke,(0,s.Z)({mobile:!0,isDropdownItem:!0,onClick:i,activeClassName:"menu__link--active"},e,{key:t}))))))}function fe(e){let{mobile:t=!1,...n}=e;const r=t?de:ue;return a.createElement(r,n)}var pe=n(4711);function me(e){let{width:t=20,height:n=20,...r}=e;return a.createElement("svg",(0,s.Z)({viewBox:"0 0 24 24",width:t,height:n,"aria-hidden":!0},r),a.createElement("path",{fill:"currentColor",d:"M12.87 15.07l-2.54-2.51.03-.03c1.74-1.94 2.98-4.17 3.71-6.53H17V4h-7V2H8v2H1v1.99h11.17C11.5 7.92 10.44 9.75 9 11.35 8.07 10.32 7.3 9.19 6.69 8h-2c.73 1.63 1.73 3.17 2.98 4.56l-5.09 5.02L4 19l5-5 3.11 3.11.76-2.04zM18.5 10h-2L12 22h2l1.12-3h4.75L21 22h2l-4.5-12zm-2.62 7l1.62-4.33L19.12 17h-3.24z"}))}const he="iconLanguage_nlXk";function ge(){return a.createElement("svg",{width:"15",height:"15",className:"DocSearch-Control-Key-Icon"},a.createElement("path",{d:"M4.505 4.496h2M5.505 5.496v5M8.216 4.496l.055 5.993M10 7.5c.333.333.5.667.5 1v2M12.326 4.5v5.996M8.384 4.496c1.674 0 2.116 0 2.116 1.5s-.442 1.5-2.116 1.5M3.205 9.303c-.09.448-.277 1.21-1.241 1.203C1 10.5.5 9.513.5 8V7c0-1.57.5-2.5 1.464-2.494.964.006 1.134.598 1.24 1.342M12.553 10.5h1.953",strokeWidth:"1.2",stroke:"currentColor",fill:"none",strokeLinecap:"square"}))}var _e=n(830),be=["translations"];function ke(){return ke=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var a in n)Object.prototype.hasOwnProperty.call(n,a)&&(e[a]=n[a])}return e},ke.apply(this,arguments)}function ve(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null==n)return;var 
a,r,o=[],i=!0,s=!1;try{for(n=n.call(e);!(i=(a=n.next()).done)&&(o.push(a.value),!t||o.length!==t);i=!0);}catch(l){s=!0,r=l}finally{try{i||null==n.return||n.return()}finally{if(s)throw r}}return o}(e,t)||function(e,t){if(!e)return;if("string"==typeof e)return ye(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);"Object"===n&&e.constructor&&(n=e.constructor.name);if("Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return ye(e,t)}(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function ye(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,a=new Array(t);n<t;n++)a[n]=e[n];return a}function we(e,t){if(null==e)return{};var n,a,r=function(e,t){if(null==e)return{};var n,a,r={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var Se="Ctrl";var Ee=a.forwardRef((function(e,t){var n=e.translations,r=void 0===n?{}:n,o=we(e,be),i=r.buttonText,s=void 0===i?"Search":i,l=r.buttonAriaLabel,c=void 0===l?"Search":l,u=ve((0,a.useState)(null),2),d=u[0],f=u[1];return(0,a.useEffect)((function(){"undefined"!=typeof navigator&&(/(Mac|iPhone|iPod|iPad)/i.test(navigator.platform)?f("\u2318"):f(Se))}),[]),a.createElement("button",ke({type:"button",className:"DocSearch 
DocSearch-Button","aria-label":c},o,{ref:t}),a.createElement("span",{className:"DocSearch-Button-Container"},a.createElement(_e.W,null),a.createElement("span",{className:"DocSearch-Button-Placeholder"},s)),a.createElement("span",{className:"DocSearch-Button-Keys"},null!==d&&a.createElement(a.Fragment,null,a.createElement("kbd",{className:"DocSearch-Button-Key"},d===Se?a.createElement(ge,null):d),a.createElement("kbd",{className:"DocSearch-Button-Key"},"K"))))})),xe=n(5742),Ce=n(6177),Te=n(239),Ae=n(3320);var Ge=n(3935);const Le={button:{buttonText:(0,c.I)({id:"theme.SearchBar.label",message:"Search",description:"The ARIA label and placeholder for search button"}),buttonAriaLabel:(0,c.I)({id:"theme.SearchBar.label",message:"Search",description:"The ARIA label and placeholder for search button"})},modal:{searchBox:{resetButtonTitle:(0,c.I)({id:"theme.SearchModal.searchBox.resetButtonTitle",message:"Clear the query",description:"The label and ARIA label for search box reset button"}),resetButtonAriaLabel:(0,c.I)({id:"theme.SearchModal.searchBox.resetButtonTitle",message:"Clear the query",description:"The label and ARIA label for search box reset button"}),cancelButtonText:(0,c.I)({id:"theme.SearchModal.searchBox.cancelButtonText",message:"Cancel",description:"The label and ARIA label for search box cancel button"}),cancelButtonAriaLabel:(0,c.I)({id:"theme.SearchModal.searchBox.cancelButtonText",message:"Cancel",description:"The label and ARIA label for search box cancel button"})},startScreen:{recentSearchesTitle:(0,c.I)({id:"theme.SearchModal.startScreen.recentSearchesTitle",message:"Recent",description:"The title for recent searches"}),noRecentSearchesText:(0,c.I)({id:"theme.SearchModal.startScreen.noRecentSearchesText",message:"No recent searches",description:"The text when no recent searches"}),saveRecentSearchButtonTitle:(0,c.I)({id:"theme.SearchModal.startScreen.saveRecentSearchButtonTitle",message:"Save this search",description:"The label for save recent search 
button"}),removeRecentSearchButtonTitle:(0,c.I)({id:"theme.SearchModal.startScreen.removeRecentSearchButtonTitle",message:"Remove this search from history",description:"The label for remove recent search button"}),favoriteSearchesTitle:(0,c.I)({id:"theme.SearchModal.startScreen.favoriteSearchesTitle",message:"Favorite",description:"The title for favorite searches"}),removeFavoriteSearchButtonTitle:(0,c.I)({id:"theme.SearchModal.startScreen.removeFavoriteSearchButtonTitle",message:"Remove this search from favorites",description:"The label for remove favorite search button"})},errorScreen:{titleText:(0,c.I)({id:"theme.SearchModal.errorScreen.titleText",message:"Unable to fetch results",description:"The title for error screen of search modal"}),helpText:(0,c.I)({id:"theme.SearchModal.errorScreen.helpText",message:"You might want to check your network connection.",description:"The help text for error screen of search modal"})},footer:{selectText:(0,c.I)({id:"theme.SearchModal.footer.selectText",message:"to select",description:"The explanatory text of the action for the enter key"}),selectKeyAriaLabel:(0,c.I)({id:"theme.SearchModal.footer.selectKeyAriaLabel",message:"Enter key",description:"The ARIA label for the Enter key button that makes the selection"}),navigateText:(0,c.I)({id:"theme.SearchModal.footer.navigateText",message:"to navigate",description:"The explanatory text of the action for the Arrow up and Arrow down key"}),navigateUpKeyAriaLabel:(0,c.I)({id:"theme.SearchModal.footer.navigateUpKeyAriaLabel",message:"Arrow up",description:"The ARIA label for the Arrow up key button that makes the navigation"}),navigateDownKeyAriaLabel:(0,c.I)({id:"theme.SearchModal.footer.navigateDownKeyAriaLabel",message:"Arrow down",description:"The ARIA label for the Arrow down key button that makes the navigation"}),closeText:(0,c.I)({id:"theme.SearchModal.footer.closeText",message:"to close",description:"The explanatory text of the action for Escape 
key"}),closeKeyAriaLabel:(0,c.I)({id:"theme.SearchModal.footer.closeKeyAriaLabel",message:"Escape key",description:"The ARIA label for the Escape key button that close the modal"}),searchByText:(0,c.I)({id:"theme.SearchModal.footer.searchByText",message:"Search by",description:"The text explain that the search is making by Algolia"})},noResultsScreen:{noResultsText:(0,c.I)({id:"theme.SearchModal.noResultsScreen.noResultsText",message:"No results for",description:"The text explains that there are no results for the following search"}),suggestedQueryText:(0,c.I)({id:"theme.SearchModal.noResultsScreen.suggestedQueryText",message:"Try searching for",description:"The text for the suggested query when no results are found for the following search"}),reportMissingResultsText:(0,c.I)({id:"theme.SearchModal.noResultsScreen.reportMissingResultsText",message:"Believe this query should return results?",description:"The text for the question where the user thinks there are missing results"}),reportMissingResultsLinkText:(0,c.I)({id:"theme.SearchModal.noResultsScreen.reportMissingResultsLinkText",message:"Let us know.",description:"The text for the link to report missing results"})}},placeholder:(0,c.I)({id:"theme.SearchModal.placeholder",message:"Search docs",description:"The placeholder of the input of the DocSearch pop-up modal"})};let Ne=null;function Re(e){let{hit:t,children:n}=e;return a.createElement(Q.Z,{to:t.url},n)}function Pe(e){let{state:t,onClose:n}=e;const r=(0,Ce.M)();return a.createElement(Q.Z,{to:r(t.query),onClick:n},a.createElement(c.Z,{id:"theme.SearchBar.seeAll",values:{count:t.context.nbHits}},"See all {count} results"))}function Ie(e){let{contextualSearch:t,externalUrlRegex:r,...o}=e;const{siteMetadata:i}=(0,le.Z)(),c=(0,Te.l)(),u=function(){const{locale:e,tags:t}=(0,Ae._q)();return[`language:${e}`,t.map((e=>`docusaurus_tag:${e}`))]}(),d=o.searchParameters?.facetFilters??[],f=t?function(e,t){const n=e=>"string"==typeof 
e?[e]:e;return[...n(e),...n(t)]}(u,d):d,p={...o.searchParameters,facetFilters:f},m=(0,l.k6)(),h=(0,a.useRef)(null),g=(0,a.useRef)(null),[_,b]=(0,a.useState)(!1),[k,v]=(0,a.useState)(void 0),y=(0,a.useCallback)((()=>Ne?Promise.resolve():Promise.all([n.e(6780).then(n.bind(n,6356)),Promise.all([n.e(532),n.e(6945)]).then(n.bind(n,6945)),Promise.all([n.e(532),n.e(8894)]).then(n.bind(n,8894))]).then((e=>{let[{DocSearchModal:t}]=e;Ne=t}))),[]),w=(0,a.useCallback)((()=>{y().then((()=>{h.current=document.createElement("div"),document.body.insertBefore(h.current,document.body.firstChild),b(!0)}))}),[y,b]),S=(0,a.useCallback)((()=>{b(!1),h.current?.remove()}),[b]),E=(0,a.useCallback)((e=>{y().then((()=>{b(!0),v(e.key)}))}),[y,b,v]),x=(0,a.useRef)({navigate(e){let{itemUrl:t}=e;(0,ee.F)(r,t)?window.location.href=t:m.push(t)}}).current,C=(0,a.useRef)((e=>o.transformItems?o.transformItems(e):e.map((e=>({...e,url:c(e.url)}))))).current,T=(0,a.useMemo)((()=>e=>a.createElement(Pe,(0,s.Z)({},e,{onClose:S}))),[S]),A=(0,a.useCallback)((e=>(e.addAlgoliaAgent("docusaurus",i.docusaurusVersion),e)),[i.docusaurusVersion]);return function(e){var t=e.isOpen,n=e.onOpen,r=e.onClose,o=e.onInput,i=e.searchButtonRef;a.useEffect((function(){function e(e){(27===e.keyCode&&t||"k"===e.key.toLowerCase()&&(e.metaKey||e.ctrlKey)||!function(e){var t=e.target,n=t.tagName;return t.isContentEditable||"INPUT"===n||"SELECT"===n||"TEXTAREA"===n}(e)&&"/"===e.key&&!t)&&(e.preventDefault(),t?r():document.body.classList.contains("DocSearch--active")||document.body.classList.contains("DocSearch--active")||n()),i&&i.current===document.activeElement&&o&&/[a-zA-Z0-9]/.test(String.fromCharCode(e.keyCode))&&o(e)}return 
window.addEventListener("keydown",e),function(){window.removeEventListener("keydown",e)}}),[t,n,r,o,i])}({isOpen:_,onOpen:w,onClose:S,onInput:E,searchButtonRef:g}),a.createElement(a.Fragment,null,a.createElement(xe.Z,null,a.createElement("link",{rel:"preconnect",href:`https://${o.appId}-dsn.algolia.net`,crossOrigin:"anonymous"})),a.createElement(Ee,{onTouchStart:y,onFocus:y,onMouseOver:y,onClick:w,ref:g,translations:Le.button}),_&&Ne&&h.current&&(0,Ge.createPortal)(a.createElement(Ne,(0,s.Z)({onClose:S,initialScrollY:window.scrollY,initialQuery:k,navigator:x,transformItems:C,hitComponent:Re,transformSearchClient:A},o.searchPagePath&&{resultsFooterComponent:T},o,{searchParameters:p,placeholder:Le.placeholder,translations:Le.modal})),h.current))}function Oe(){const{siteConfig:e}=(0,le.Z)();return a.createElement(Ie,e.themeConfig.algolia)}const Be={searchBox:"searchBox_ZlJk"};function De(e){let{children:t,className:n}=e;return a.createElement("div",{className:(0,r.Z)(n,Be.searchBox)},t)}var Me=n(143),Fe=n(2802);var Ue=n(373);const je=e=>e.docs.find((t=>t.id===e.mainDocId));const ze={default:oe,localeDropdown:function(e){let{mobile:t,dropdownItemsBefore:n,dropdownItemsAfter:r,...o}=e;const{i18n:{currentLocale:i,locales:u,localeConfigs:d}}=(0,le.Z)(),f=(0,pe.l)(),{search:p,hash:m}=(0,l.TH)(),h=[...n,...u.map((e=>{const n=`${`pathname://${f.createUrl({locale:e,fullyQualified:!1})}`}${p}${m}`;return{label:d[e].label,lang:d[e].htmlLang,to:n,target:"_self",autoAddBaseUrl:!1,className:e===i?t?"menu__link--active":"dropdown__link--active":""}})),...r],g=t?(0,c.I)({message:"Languages",id:"theme.navbar.mobileLanguageDropdown.label",description:"The label for the mobile language switcher dropdown"}):d[i].label;return a.createElement(fe,(0,s.Z)({},o,{mobile:t,label:a.createElement(a.Fragment,null,a.createElement(me,{className:he}),g),items:h}))},search:function(e){let{mobile:t,className:n}=e;return 
t?null:a.createElement(De,{className:n},a.createElement(Oe,null))},dropdown:fe,html:function(e){let{value:t,className:n,mobile:o=!1,isDropdownItem:i=!1}=e;const s=i?"li":"div";return a.createElement(s,{className:(0,r.Z)({navbar__item:!o&&!i,"menu__list-item":o},n),dangerouslySetInnerHTML:{__html:t}})},doc:function(e){let{docId:t,label:n,docsPluginId:r,...o}=e;const{activeDoc:i}=(0,Me.Iw)(r),l=(0,Fe.vY)(t,r);return null===l?null:a.createElement(oe,(0,s.Z)({exact:!0},o,{isActive:()=>i?.path===l.path||!!i?.sidebar&&i.sidebar===l.sidebar,label:n??l.id,to:l.path}))},docSidebar:function(e){let{sidebarId:t,label:n,docsPluginId:r,...o}=e;const{activeDoc:i}=(0,Me.Iw)(r),l=(0,Fe.oz)(t,r).link;if(!l)throw new Error(`DocSidebarNavbarItem: Sidebar with ID "${t}" doesn't have anything to be linked to.`);return a.createElement(oe,(0,s.Z)({exact:!0},o,{isActive:()=>i?.sidebar===t,label:n??l.label,to:l.path}))},docsVersion:function(e){let{label:t,to:n,docsPluginId:r,...o}=e;const i=(0,Fe.lO)(r)[0],l=t??i.label,c=n??(e=>e.docs.find((t=>t.id===e.mainDocId)))(i).path;return a.createElement(oe,(0,s.Z)({},o,{label:l,to:c}))},docsVersionDropdown:function(e){let{mobile:t,docsPluginId:n,dropdownActiveClassDisabled:r,dropdownItemsBefore:o,dropdownItemsAfter:i,...u}=e;const{search:d,hash:f}=(0,l.TH)(),p=(0,Me.Iw)(n),m=(0,Me.gB)(n),{savePreferredVersionName:h}=(0,Ue.J)(n),g=[...o,...m.map((e=>{const t=p.alternateDocVersions[e.name]??je(e);return{label:e.label,to:`${t.path}${d}${f}`,isActive:()=>e===p.activeVersion,onClick:()=>h(e.name)}})),...i],_=(0,Fe.lO)(n)[0],b=t&&g.length>1?(0,c.I)({id:"theme.navbar.mobileVersionsDropdown.label",message:"Versions",description:"The label for the navbar versions dropdown on mobile view"}):_.label,k=t&&g.length>1?void 0:je(_).path;return g.length<=1?a.createElement(oe,(0,s.Z)({},u,{mobile:t,label:b,to:k,isActive:r?()=>!1:void 0})):a.createElement(fe,(0,s.Z)({},u,{mobile:t,label:b,to:k,items:g,isActive:r?()=>!1:void 0}))}};function 
Ke(e){let{type:t,...n}=e;const r=function(e,t){return e&&"default"!==e?e:"items"in t?"dropdown":"default"}(t,n),o=ze[r];if(!o)throw new Error(`No NavbarItem component found for type "${t}".`);return a.createElement(o,n)}function $e(){const e=(0,G.e)(),t=(0,v.L)().navbar.items;return a.createElement("ul",{className:"menu__list"},t.map(((t,n)=>a.createElement(Ke,(0,s.Z)({mobile:!0},t,{onClick:()=>e.toggle(),key:n})))))}function He(e){return a.createElement("button",(0,s.Z)({},e,{type:"button",className:"clean-btn navbar-sidebar__back"}),a.createElement(c.Z,{id:"theme.navbar.mobileSidebarSecondaryMenu.backButtonLabel",description:"The label of the back button to return to main menu, inside the mobile navbar sidebar secondary menu (notably used to display the docs sidebar)"},"\u2190 Back to main menu"))}function qe(){const e=0===(0,v.L)().navbar.items.length,t=B();return a.createElement(a.Fragment,null,!e&&a.createElement(He,{onClick:()=>t.hide()}),t.content)}function Ze(){const e=(0,G.e)();var t;return void 0===(t=e.shown)&&(t=!0),(0,a.useEffect)((()=>(document.body.style.overflow=t?"hidden":"visible",()=>{document.body.style.overflow="visible"})),[t]),e.shouldRender?a.createElement(D,{header:a.createElement(Y,null),primaryMenu:a.createElement($e,null),secondaryMenu:a.createElement(qe,null)}):null}const We={navbarHideable:"navbarHideable_m1mJ",navbarHidden:"navbarHidden_jGov"};function Ve(e){return a.createElement("div",(0,s.Z)({role:"presentation"},e,{className:(0,r.Z)("navbar-sidebar__backdrop",e.className)}))}function Ye(e){let{children:t}=e;const{navbar:{hideOnScroll:n,style:o}}=(0,v.L)(),i=(0,G.e)(),{navbarRef:s,isNavbarVisible:l}=function(e){const[t,n]=(0,a.useState)(e),r=(0,a.useRef)(!1),o=(0,a.useRef)(0),i=(0,a.useCallback)((e=>{null!==e&&(o.current=e.getBoundingClientRect().height)}),[]);return(0,L.RF)(((t,a)=>{let{scrollY:i}=t;if(!e)return;if(i<o.current)return void n(!0);if(r.current)return void(r.current=!1);const 
s=a?.scrollY,l=document.documentElement.scrollHeight-o.current,c=window.innerHeight;s&&i>=s?n(!1):i+c<l&&n(!0)})),(0,u.S)((t=>{if(!e)return;const a=t.location.hash;if(a?document.getElementById(a.substring(1)):void 0)return r.current=!0,void n(!1);n(!0)})),{navbarRef:i,isNavbarVisible:t}}(n);return a.createElement("nav",{ref:s,"aria-label":(0,c.I)({id:"theme.NavBar.navAriaLabel",message:"Main",description:"The ARIA label for the main navigation"}),className:(0,r.Z)("navbar","navbar--fixed-top",n&&[We.navbarHideable,!l&&We.navbarHidden],{"navbar--dark":"dark"===o,"navbar--primary":"primary"===o,"navbar-sidebar--show":i.shown})},t,a.createElement(Ve,{onClick:i.toggle}),a.createElement(Ze,null))}var Qe=n(8780);const Xe={errorBoundaryError:"errorBoundaryError_a6uf"};function Je(e){return a.createElement("button",(0,s.Z)({type:"button"},e),a.createElement(c.Z,{id:"theme.ErrorPageContent.tryAgain",description:"The label of the button to try again rendering when the React error boundary captures an error"},"Try again"))}function et(e){let{error:t}=e;const n=(0,Qe.getErrorCausalChain)(t).map((e=>e.message)).join("\n\nCause:\n");return a.createElement("p",{className:Xe.errorBoundaryError},n)}class tt extends a.Component{componentDidCatch(e,t){throw this.props.onError(e,t)}render(){return this.props.children}}const nt="right";function at(e){let{width:t=30,height:n=30,className:r,...o}=e;return a.createElement("svg",(0,s.Z)({className:r,width:t,height:n,viewBox:"0 0 30 30","aria-hidden":"true"},o),a.createElement("path",{stroke:"currentColor",strokeLinecap:"round",strokeMiterlimit:"10",strokeWidth:"2",d:"M4 7h22M4 15h22M4 23h22"}))}function rt(){const{toggle:e,shown:t}=(0,G.e)();return a.createElement("button",{onClick:e,"aria-label":(0,c.I)({id:"theme.docs.sidebar.toggleSidebarButtonAriaLabel",message:"Toggle navigation bar",description:"The ARIA label for hamburger menu button of mobile navigation"}),"aria-expanded":t,className:"navbar__toggle 
clean-btn",type:"button"},a.createElement(at,null))}const ot={colorModeToggle:"colorModeToggle_DEke"};function it(e){let{items:t}=e;return a.createElement(a.Fragment,null,t.map(((e,t)=>a.createElement(tt,{key:t,onError:t=>new Error(`A theme navbar item failed to render.\nPlease double-check the following navbar item (themeConfig.navbar.items) of your Docusaurus config:\n${JSON.stringify(e,null,2)}`,{cause:t})},a.createElement(Ke,e)))))}function st(e){let{left:t,right:n}=e;return a.createElement("div",{className:"navbar__inner"},a.createElement("div",{className:"navbar__items"},t),a.createElement("div",{className:"navbar__items navbar__items--right"},n))}function lt(){const e=(0,G.e)(),t=(0,v.L)().navbar.items,[n,r]=function(e){function t(e){return"left"===(e.position??nt)}return[e.filter(t),e.filter((e=>!t(e)))]}(t),o=t.find((e=>"search"===e.type));return a.createElement(st,{left:a.createElement(a.Fragment,null,!e.disabled&&a.createElement(rt,null),a.createElement(W,null),a.createElement(it,{items:n})),right:a.createElement(a.Fragment,null,a.createElement(it,{items:r}),a.createElement(q,{className:ot.colorModeToggle}),!o&&a.createElement(De,null,a.createElement(Oe,null)))})}function ct(){return a.createElement(Ye,null,a.createElement(lt,null))}function ut(e){let{item:t}=e;const{to:n,href:r,label:o,prependBaseUrlToHref:i,...l}=t,c=(0,X.Z)(n),u=(0,X.Z)(r,{forcePrependBaseUrl:!0});return a.createElement(Q.Z,(0,s.Z)({className:"footer__link-item"},r?{href:i?u:r}:{to:c},l),o,r&&!(0,J.Z)(r)&&a.createElement(te.Z,null))}function dt(e){let{item:t}=e;return t.html?a.createElement("li",{className:"footer__item",dangerouslySetInnerHTML:{__html:t.html}}):a.createElement("li",{key:t.href??t.to,className:"footer__item"},a.createElement(ut,{item:t}))}function ft(e){let{column:t}=e;return a.createElement("div",{className:"col footer__col"},a.createElement("div",{className:"footer__title"},t.title),a.createElement("ul",{className:"footer__items 
clean-list"},t.items.map(((e,t)=>a.createElement(dt,{key:t,item:e})))))}function pt(e){let{columns:t}=e;return a.createElement("div",{className:"row footer__links"},t.map(((e,t)=>a.createElement(ft,{key:t,column:e}))))}function mt(){return a.createElement("span",{className:"footer__link-separator"},"\xb7")}function ht(e){let{item:t}=e;return t.html?a.createElement("span",{className:"footer__link-item",dangerouslySetInnerHTML:{__html:t.html}}):a.createElement(ut,{item:t})}function gt(e){let{links:t}=e;return a.createElement("div",{className:"footer__links text--center"},a.createElement("div",{className:"footer__links"},t.map(((e,n)=>a.createElement(a.Fragment,{key:n},a.createElement(ht,{item:e}),t.length!==n+1&&a.createElement(mt,null))))))}function _t(e){let{links:t}=e;return function(e){return"title"in e[0]}(t)?a.createElement(pt,{columns:t}):a.createElement(gt,{links:t})}var bt=n(941);const kt={footerLogoLink:"footerLogoLink_BH7S"};function vt(e){let{logo:t}=e;const{withBaseUrl:n}=(0,X.C)(),o={light:n(t.src),dark:n(t.srcDark??t.src)};return a.createElement(bt.Z,{className:(0,r.Z)("footer__logo",t.className),alt:t.alt,sources:o,width:t.width,height:t.height,style:t.style})}function yt(e){let{logo:t}=e;return t.href?a.createElement(Q.Z,{href:t.href,className:kt.footerLogoLink,target:t.target},a.createElement(vt,{logo:t})):a.createElement(vt,{logo:t})}function wt(e){let{copyright:t}=e;return a.createElement("div",{className:"footer__copyright",dangerouslySetInnerHTML:{__html:t}})}function St(e){let{style:t,links:n,logo:o,copyright:i}=e;return a.createElement("footer",{className:(0,r.Z)("footer",{"footer--dark":"dark"===t})},a.createElement("div",{className:"container container-fluid"},n,(o||i)&&a.createElement("div",{className:"footer__bottom text--center"},o&&a.createElement("div",{className:"margin-bottom--sm"},o),i)))}function Et(){const{footer:e}=(0,v.L)();if(!e)return null;const{copyright:t,links:n,logo:r,style:o}=e;return 
a.createElement(St,{style:o,links:n&&n.length>0&&a.createElement(_t,{links:n}),logo:r&&a.createElement(yt,{logo:r}),copyright:t&&a.createElement(wt,{copyright:t})})}const xt=a.memo(Et),Ct=(0,N.Qc)([M.S,y.pl,L.OC,Ue.L5,i.VC,function(e){let{children:t}=e;return a.createElement(R.n2,null,a.createElement(G.M,null,a.createElement(I,null,t)))}]);function Tt(e){let{children:t}=e;return a.createElement(Ct,null,t)}function At(e){let{error:t,tryAgain:n}=e;return a.createElement("main",{className:"container margin-vert--xl"},a.createElement("div",{className:"row"},a.createElement("div",{className:"col col--6 col--offset-3"},a.createElement("h1",{className:"hero__title"},a.createElement(c.Z,{id:"theme.ErrorPageContent.title",description:"The title of the fallback page when the page crashed"},"This page crashed.")),a.createElement("div",{className:"margin-vert--lg"},a.createElement(Je,{onClick:n,className:"button button--primary shadow--lw"})),a.createElement("hr",null),a.createElement("div",{className:"margin-vert--md"},a.createElement(et,{error:t})))))}const Gt={mainWrapper:"mainWrapper_z2l0"};function Lt(e){const{children:t,noFooter:n,wrapperClassName:s,title:l,description:c}=e;return(0,_.t)(),a.createElement(Tt,null,a.createElement(i.d,{title:l,description:c}),a.createElement(k,null),a.createElement(A,null),a.createElement(ct,null),a.createElement("div",{id:d,className:(0,r.Z)(g.k.wrapper.main,Gt.mainWrapper,s)},a.createElement(o.Z,{fallback:e=>a.createElement(At,e)},t)),!n&&a.createElement(xt,null))}},1327:(e,t,n)=>{"use strict";n.d(t,{Z:()=>d});var a=n(7462),r=n(7294),o=n(9960),i=n(4996),s=n(2263),l=n(6668),c=n(941);function u(e){let{logo:t,alt:n,imageClassName:a}=e;const o={light:(0,i.Z)(t.src),dark:(0,i.Z)(t.srcDark||t.src)},s=r.createElement(c.Z,{className:t.className,sources:o,height:t.height,width:t.width,alt:n,style:t.style});return a?r.createElement("div",{className:a},s):s}function 
d(e){const{siteConfig:{title:t}}=(0,s.Z)(),{navbar:{title:n,logo:c}}=(0,l.L)(),{imageClassName:d,titleClassName:f,...p}=e,m=(0,i.Z)(c?.href||"/"),h=n?"":t,g=c?.alt??h;return r.createElement(o.Z,(0,a.Z)({to:m},p,c?.target&&{target:c.target}),c&&r.createElement(u,{logo:c,alt:g,imageClassName:d}),null!=n&&r.createElement("b",{className:f},n))}},197:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var a=n(7294),r=n(5742);function o(e){let{locale:t,version:n,tag:o}=e;const i=t;return a.createElement(r.Z,null,t&&a.createElement("meta",{name:"docusaurus_locale",content:t}),n&&a.createElement("meta",{name:"docusaurus_version",content:n}),o&&a.createElement("meta",{name:"docusaurus_tag",content:o}),i&&a.createElement("meta",{name:"docsearch:language",content:i}),n&&a.createElement("meta",{name:"docsearch:version",content:n}),o&&a.createElement("meta",{name:"docsearch:docusaurus_tag",content:o}))}},941:(e,t,n)=>{"use strict";n.d(t,{Z:()=>c});var a=n(7462),r=n(7294),o=n(6010),i=n(2389),s=n(2949);const l={themedImage:"themedImage_ToTc","themedImage--light":"themedImage--light_HNdA","themedImage--dark":"themedImage--dark_i4oU"};function c(e){const t=(0,i.Z)(),{colorMode:n}=(0,s.I)(),{sources:c,className:u,alt:d,...f}=e,p=t?"dark"===n?["dark"]:["light"]:["light","dark"];return r.createElement(r.Fragment,null,p.map((e=>r.createElement("img",(0,a.Z)({key:e,src:c[e],alt:d,className:(0,o.Z)(l.themedImage,l[`themedImage--${e}`],u)},f)))))}},6043:(e,t,n)=>{"use strict";n.d(t,{u:()=>s,z:()=>g});var a=n(7462),r=n(7294),o=n(412);const i="ease-in-out";function s(e){let{initialState:t}=e;const[n,a]=(0,r.useState)(t??!1),o=(0,r.useCallback)((()=>{a((e=>!e))}),[]);return{collapsed:n,setCollapsed:a,toggleCollapsed:o}}const l={display:"none",overflow:"hidden",height:"0px"},c={display:"block",overflow:"visible",height:"auto"};function u(e,t){const n=t?l:c;e.style.display=n.display,e.style.overflow=n.overflow,e.style.height=n.height}function d(e){if(window.matchMedia("(prefers-reduced-motion: 
reduce)").matches)return 0;const t=e/36;return Math.round(10*(4+15*t**.25+t/5))}function f(e){let{collapsibleRef:t,collapsed:n,animation:a}=e;const o=(0,r.useRef)(!1);(0,r.useEffect)((()=>{const e=t.current;function r(){const t=function(){const t=e.scrollHeight;return{transition:`height ${a?.duration??d(t)}ms ${a?.easing??i}`,height:`${t}px`}}();e.style.transition=t.transition,e.style.height=t.height}if(!o.current)return u(e,n),void(o.current=!0);return e.style.willChange="height",function(){const t=requestAnimationFrame((()=>{n?(r(),requestAnimationFrame((()=>{e.style.height=l.height,e.style.overflow=l.overflow}))):(e.style.display="block",requestAnimationFrame((()=>{r()})))}));return()=>cancelAnimationFrame(t)}()}),[t,n,a])}function p(e){if(!o.Z.canUseDOM)return e?l:c}function m(e){let{as:t="div",collapsed:n,children:a,animation:o,onCollapseTransitionEnd:i,className:s,disableSSRStyle:l}=e;const c=(0,r.useRef)(null);return f({collapsibleRef:c,collapsed:n,animation:o}),r.createElement(t,{ref:c,style:l?void 0:p(n),onTransitionEnd:e=>{"height"===e.propertyName&&(u(c.current,n),i?.(n))},className:s},a)}function h(e){let{collapsed:t,...n}=e;const[o,i]=(0,r.useState)(!t),[s,l]=(0,r.useState)(t);return(0,r.useLayoutEffect)((()=>{t||i(!0)}),[t]),(0,r.useLayoutEffect)((()=>{o&&l(t)}),[o,t]),o?r.createElement(m,(0,a.Z)({},n,{collapsed:s})):null}function g(e){let{lazy:t,...n}=e;const a=t?h:m;return r.createElement(a,n)}},9689:(e,t,n)=>{"use strict";n.d(t,{nT:()=>m,pl:()=>p});var a=n(7294),r=n(2389),o=n(12),i=n(902),s=n(6668);const l=(0,o.WA)("docusaurus.announcement.dismiss"),c=(0,o.WA)("docusaurus.announcement.id"),u=()=>"true"===l.get(),d=e=>l.set(String(e)),f=a.createContext(null);function p(e){let{children:t}=e;const n=function(){const{announcementBar:e}=(0,s.L)(),t=(0,r.Z)(),[n,o]=(0,a.useState)((()=>!!t&&u()));(0,a.useEffect)((()=>{o(u())}),[]);const i=(0,a.useCallback)((()=>{d(!0),o(!0)}),[]);return(0,a.useEffect)((()=>{if(!e)return;const{id:t}=e;let 
n=c.get();"annoucement-bar"===n&&(n="announcement-bar");const a=t!==n;c.set(t),a&&d(!1),!a&&u()||o(!1)}),[e]),(0,a.useMemo)((()=>({isActive:!!e&&!n,close:i})),[e,n,i])}();return a.createElement(f.Provider,{value:n},t)}function m(){const e=(0,a.useContext)(f);if(!e)throw new i.i6("AnnouncementBarProvider");return e}},2949:(e,t,n)=>{"use strict";n.d(t,{I:()=>g,S:()=>h});var a=n(7294),r=n(412),o=n(902),i=n(12),s=n(6668);const l=a.createContext(void 0),c="theme",u=(0,i.WA)(c),d={light:"light",dark:"dark"},f=e=>e===d.dark?d.dark:d.light,p=e=>r.Z.canUseDOM?f(document.documentElement.getAttribute("data-theme")):f(e),m=e=>{u.set(f(e))};function h(e){let{children:t}=e;const n=function(){const{colorMode:{defaultMode:e,disableSwitch:t,respectPrefersColorScheme:n}}=(0,s.L)(),[r,o]=(0,a.useState)(p(e));(0,a.useEffect)((()=>{t&&u.del()}),[t]);const i=(0,a.useCallback)((function(t,a){void 0===a&&(a={});const{persist:r=!0}=a;t?(o(t),r&&m(t)):(o(n?window.matchMedia("(prefers-color-scheme: dark)").matches?d.dark:d.light:e),u.del())}),[n,e]);(0,a.useEffect)((()=>{document.documentElement.setAttribute("data-theme",f(r))}),[r]),(0,a.useEffect)((()=>{if(t)return;const e=e=>{if(e.key!==c)return;const t=u.get();null!==t&&i(f(t))};return window.addEventListener("storage",e),()=>window.removeEventListener("storage",e)}),[t,i]);const l=(0,a.useRef)(!1);return(0,a.useEffect)((()=>{if(t&&!n)return;const e=window.matchMedia("(prefers-color-scheme: dark)"),a=()=>{window.matchMedia("print").matches||l.current?l.current=window.matchMedia("print").matches:i(null)};return e.addListener(a),()=>e.removeListener(a)}),[i,t,n]),(0,a.useMemo)((()=>({colorMode:r,setColorMode:i,get isDarkTheme(){return r===d.dark},setLightTheme(){i(d.light)},setDarkTheme(){i(d.dark)}})),[r,i])}();return a.createElement(l.Provider,{value:n},t)}function g(){const e=(0,a.useContext)(l);if(null==e)throw new o.i6("ColorModeProvider","Please see https://docusaurus.io/docs/api/themes/configuration#use-color-mode.");return 
e}},373:(e,t,n)=>{"use strict";n.d(t,{J:()=>b,L5:()=>g,Oh:()=>k});var a=n(7294),r=n(143),o=n(9935),i=n(6668),s=n(2802),l=n(902),c=n(12);const u=e=>`docs-preferred-version-${e}`,d={save:(e,t,n)=>{(0,c.WA)(u(e),{persistence:t}).set(n)},read:(e,t)=>(0,c.WA)(u(e),{persistence:t}).get(),clear:(e,t)=>{(0,c.WA)(u(e),{persistence:t}).del()}},f=e=>Object.fromEntries(e.map((e=>[e,{preferredVersionName:null}])));const p=a.createContext(null);function m(){const e=(0,r._r)(),t=(0,i.L)().docs.versionPersistence,n=(0,a.useMemo)((()=>Object.keys(e)),[e]),[o,s]=(0,a.useState)((()=>f(n)));(0,a.useEffect)((()=>{s(function(e){let{pluginIds:t,versionPersistence:n,allDocsData:a}=e;function r(e){const t=d.read(e,n);return a[e].versions.some((e=>e.name===t))?{preferredVersionName:t}:(d.clear(e,n),{preferredVersionName:null})}return Object.fromEntries(t.map((e=>[e,r(e)])))}({allDocsData:e,versionPersistence:t,pluginIds:n}))}),[e,t,n]);return[o,(0,a.useMemo)((()=>({savePreferredVersion:function(e,n){d.save(e,t,n),s((t=>({...t,[e]:{preferredVersionName:n}})))}})),[t])]}function h(e){let{children:t}=e;const n=m();return a.createElement(p.Provider,{value:n},t)}function g(e){let{children:t}=e;return s.cE?a.createElement(h,null,t):a.createElement(a.Fragment,null,t)}function _(){const e=(0,a.useContext)(p);if(!e)throw new l.i6("DocsPreferredVersionContextProvider");return e}function b(e){void 0===e&&(e=o.m);const t=(0,r.zh)(e),[n,i]=_(),{preferredVersionName:s}=n[e];return{preferredVersion:t.versions.find((e=>e.name===s))??null,savePreferredVersionName:(0,a.useCallback)((t=>{i.savePreferredVersion(e,t)}),[i,e])}}function k(){const e=(0,r._r)(),[t]=_();function n(n){const a=e[n],{preferredVersionName:r}=t[n];return a.versions.find((e=>e.name===r))??null}const a=Object.keys(e);return Object.fromEntries(a.map((e=>[e,n(e)])))}},1116:(e,t,n)=>{"use strict";n.d(t,{V:()=>l,b:()=>s});var a=n(7294),r=n(902);const o=Symbol("EmptyContext"),i=a.createContext(o);function 
s(e){let{children:t,name:n,items:r}=e;const o=(0,a.useMemo)((()=>n&&r?{name:n,items:r}:null),[n,r]);return a.createElement(i.Provider,{value:o},t)}function l(){const e=(0,a.useContext)(i);if(e===o)throw new r.i6("DocsSidebarProvider");return e}},3163:(e,t,n)=>{"use strict";n.d(t,{M:()=>d,e:()=>f});var a=n(7294),r=n(3102),o=n(7524),i=n(1980),s=n(6668),l=n(902);const c=a.createContext(void 0);function u(){const e=function(){const e=(0,r.HY)(),{items:t}=(0,s.L)().navbar;return 0===t.length&&!e.component}(),t=(0,o.i)(),n=!e&&"mobile"===t,[l,c]=(0,a.useState)(!1);(0,i.Rb)((()=>{if(l)return c(!1),!1}));const u=(0,a.useCallback)((()=>{c((e=>!e))}),[]);return(0,a.useEffect)((()=>{"desktop"===t&&c(!1)}),[t]),(0,a.useMemo)((()=>({disabled:e,shouldRender:n,toggle:u,shown:l})),[e,n,u,l])}function d(e){let{children:t}=e;const n=u();return a.createElement(c.Provider,{value:n},t)}function f(){const e=a.useContext(c);if(void 0===e)throw new l.i6("NavbarMobileSidebarProvider");return e}},3102:(e,t,n)=>{"use strict";n.d(t,{HY:()=>s,Zo:()=>l,n2:()=>i});var a=n(7294),r=n(902);const o=a.createContext(null);function i(e){let{children:t}=e;const n=(0,a.useState)({component:null,props:null});return a.createElement(o.Provider,{value:n},t)}function s(){const e=(0,a.useContext)(o);if(!e)throw new r.i6("NavbarSecondaryMenuContentProvider");return e[0]}function l(e){let{component:t,props:n}=e;const i=(0,a.useContext)(o);if(!i)throw new r.i6("NavbarSecondaryMenuContentProvider");const[,s]=i,l=(0,r.Ql)(n);return(0,a.useEffect)((()=>{s({component:t,props:l})}),[s,t,l]),(0,a.useEffect)((()=>()=>s({component:null,props:null})),[s]),null}},9727:(e,t,n)=>{"use strict";n.d(t,{h:()=>r,t:()=>o});var a=n(7294);const r="navigation-with-keyboard";function o(){(0,a.useEffect)((()=>{function e(e){"keydown"===e.type&&"Tab"===e.key&&document.body.classList.add(r),"mousedown"===e.type&&document.body.classList.remove(r)}return 
document.addEventListener("keydown",e),document.addEventListener("mousedown",e),()=>{document.body.classList.remove(r),document.removeEventListener("keydown",e),document.removeEventListener("mousedown",e)}}),[])}},6177:(e,t,n)=>{"use strict";n.d(t,{K:()=>s,M:()=>l});var a=n(7294),r=n(2263),o=n(1980);const i="q";function s(){return(0,o.Nc)(i)}function l(){const{siteConfig:{baseUrl:e,themeConfig:t}}=(0,r.Z)(),{algolia:{searchPagePath:n}}=t;return(0,a.useCallback)((t=>`${e}${n}?${i}=${encodeURIComponent(t)}`),[e,n])}},7524:(e,t,n)=>{"use strict";n.d(t,{i:()=>c});var a=n(7294),r=n(412);const o={desktop:"desktop",mobile:"mobile",ssr:"ssr"},i=996;function s(){return r.Z.canUseDOM?window.innerWidth>i?o.desktop:o.mobile:o.ssr}const l=!1;function c(){const[e,t]=(0,a.useState)((()=>l?"ssr":s()));return(0,a.useEffect)((()=>{function e(){t(s())}const n=l?window.setTimeout(e,1e3):void 0;return window.addEventListener("resize",e),()=>{window.removeEventListener("resize",e),clearTimeout(n)}}),[]),e}},5281:(e,t,n)=>{"use strict";n.d(t,{k:()=>a});const 
a={page:{blogListPage:"blog-list-page",blogPostPage:"blog-post-page",blogTagsListPage:"blog-tags-list-page",blogTagPostListPage:"blog-tags-post-list-page",docsDocPage:"docs-doc-page",docsTagsListPage:"docs-tags-list-page",docsTagDocListPage:"docs-tags-doc-list-page",mdxPage:"mdx-page"},wrapper:{main:"main-wrapper",blogPages:"blog-wrapper",docsPages:"docs-wrapper",mdxPages:"mdx-wrapper"},common:{editThisPage:"theme-edit-this-page",lastUpdated:"theme-last-updated",backToTopButton:"theme-back-to-top-button",codeBlock:"theme-code-block",admonition:"theme-admonition",admonitionType:e=>`theme-admonition-${e}`},layout:{},docs:{docVersionBanner:"theme-doc-version-banner",docVersionBadge:"theme-doc-version-badge",docBreadcrumbs:"theme-doc-breadcrumbs",docMarkdown:"theme-doc-markdown",docTocMobile:"theme-doc-toc-mobile",docTocDesktop:"theme-doc-toc-desktop",docFooter:"theme-doc-footer",docFooterTagsRow:"theme-doc-footer-tags-row",docFooterEditMetaRow:"theme-doc-footer-edit-meta-row",docSidebarContainer:"theme-doc-sidebar-container",docSidebarMenu:"theme-doc-sidebar-menu",docSidebarItemCategory:"theme-doc-sidebar-item-category",docSidebarItemLink:"theme-doc-sidebar-item-link",docSidebarItemCategoryLevel:e=>`theme-doc-sidebar-item-category-level-${e}`,docSidebarItemLinkLevel:e=>`theme-doc-sidebar-item-link-level-${e}`},blog:{}}},2802:(e,t,n)=>{"use strict";n.d(t,{Wl:()=>f,_F:()=>h,cE:()=>d,hI:()=>y,lO:()=>b,vY:()=>v,oz:()=>k,s1:()=>_});var a=n(7294),r=n(6550),o=n(8790),i=n(143),s=n(373),l=n(1116);function c(e){return Array.from(new Set(e))}var u=n(8596);const d=!!i._r;function f(e){if(e.href)return e.href;for(const t of e.items){if("link"===t.type)return t.href;if("category"===t.type){const e=f(t);if(e)return e}}}const p=(e,t)=>void 0!==e&&(0,u.Mg)(e,t),m=(e,t)=>e.some((e=>h(e,t)));function h(e,t){return"link"===e.type?p(e.href,t):"category"===e.type&&(p(e.href,t)||m(e.items,t))}function g(e){let{sidebarItems:t,pathname:n,onlyCategories:a=!1}=e;const r=[];return function 
e(t){for(const o of t)if("category"===o.type&&((0,u.Mg)(o.href,n)||e(o.items))||"link"===o.type&&(0,u.Mg)(o.href,n)){return a&&"category"!==o.type||r.unshift(o),!0}return!1}(t),r}function _(){const e=(0,l.V)(),{pathname:t}=(0,r.TH)(),n=(0,i.gA)()?.pluginData.breadcrumbs;return!1!==n&&e?g({sidebarItems:e.items,pathname:t}):null}function b(e){const{activeVersion:t}=(0,i.Iw)(e),{preferredVersion:n}=(0,s.J)(e),r=(0,i.yW)(e);return(0,a.useMemo)((()=>c([t,n,r].filter(Boolean))),[t,n,r])}function k(e,t){const n=b(t);return(0,a.useMemo)((()=>{const t=n.flatMap((e=>e.sidebars?Object.entries(e.sidebars):[])),a=t.find((t=>t[0]===e));if(!a)throw new Error(`Can't find any sidebar with id "${e}" in version${n.length>1?"s":""} ${n.map((e=>e.name)).join(", ")}".\nAvailable sidebar ids are:\n- ${Object.keys(t).join("\n- ")}`);return a[1]}),[e,n])}function v(e,t){const n=b(t);return(0,a.useMemo)((()=>{const t=n.flatMap((e=>e.docs)),a=t.find((t=>t.id===e));if(!a){if(n.flatMap((e=>e.draftIds)).includes(e))return null;throw new Error(`Couldn't find any doc with id "${e}" in version${n.length>1?"s":""} "${n.map((e=>e.name)).join(", ")}".\nAvailable doc ids are:\n- ${c(t.map((e=>e.id))).join("\n- ")}`)}return a}),[e,n])}function y(e){let{route:t,versionMetadata:n}=e;const a=(0,r.TH)(),i=t.routes,s=i.find((e=>(0,r.LX)(a.pathname,e)));if(!s)return null;const l=s.sidebar,c=l?n.docsSidebars[l]:void 0;return{docElement:(0,o.H)(i),sidebarName:l,sidebarItems:c}}},2128:(e,t,n)=>{"use strict";n.d(t,{p:()=>r});var a=n(2263);function r(e){const{siteConfig:t}=(0,a.Z)(),{title:n,titleDelimiter:r}=t;return e?.trim().length?`${e.trim()} ${r} ${n}`:n}},1980:(e,t,n)=>{"use strict";n.d(t,{Nc:()=>c,Rb:()=>s});var a=n(7294),r=n(6550),o=n(1688),i=n(902);function s(e){!function(e){const t=(0,r.k6)(),n=(0,i.zX)(e);(0,a.useEffect)((()=>t.block(((e,t)=>n(e,t)))),[t,n])}(((t,n)=>{if("POP"===n)return e(t,n)}))}function l(e){return function(e){const 
t=(0,r.k6)();return(0,o.useSyncExternalStore)(t.listen,(()=>e(t)),(()=>e(t)))}((t=>null===e?null:new URLSearchParams(t.location.search).get(e)))}function c(e){const t=l(e)??"",n=function(){const e=(0,r.k6)();return(0,a.useCallback)(((t,n,a)=>{const r=new URLSearchParams(e.location.search);n?r.set(t,n):r.delete(t),(a?.push?e.push:e.replace)({search:r.toString()})}),[e])}();return[t,(0,a.useCallback)(((t,a)=>{n(e,t,a)}),[n,e])]}},833:(e,t,n)=>{"use strict";n.d(t,{FG:()=>f,d:()=>u,VC:()=>p});var a=n(7294),r=n(6010),o=n(5742),i=n(226);function s(){const e=a.useContext(i._);if(!e)throw new Error("Unexpected: no Docusaurus route context found");return e}var l=n(4996),c=n(2128);function u(e){let{title:t,description:n,keywords:r,image:i,children:s}=e;const u=(0,c.p)(t),{withBaseUrl:d}=(0,l.C)(),f=i?d(i,{absolute:!0}):void 0;return a.createElement(o.Z,null,t&&a.createElement("title",null,u),t&&a.createElement("meta",{property:"og:title",content:u}),n&&a.createElement("meta",{name:"description",content:n}),n&&a.createElement("meta",{property:"og:description",content:n}),r&&a.createElement("meta",{name:"keywords",content:Array.isArray(r)?r.join(","):r}),f&&a.createElement("meta",{property:"og:image",content:f}),f&&a.createElement("meta",{name:"twitter:image",content:f}),s)}const d=a.createContext(void 0);function f(e){let{className:t,children:n}=e;const i=a.useContext(d),s=(0,r.Z)(i,t);return a.createElement(d.Provider,{value:s},a.createElement(o.Z,null,a.createElement("html",{className:s})),n)}function p(e){let{children:t}=e;const n=s(),o=`plugin-${n.plugin.name.replace(/docusaurus-(?:plugin|theme)-(?:content-)?/gi,"")}`;const i=`plugin-id-${n.plugin.id}`;return a.createElement(f,{className:(0,r.Z)(o,i)},t)}},902:(e,t,n)=>{"use strict";n.d(t,{D9:()=>i,Qc:()=>c,Ql:()=>l,i6:()=>s,zX:()=>o});var a=n(7294);const r=n(412).Z.canUseDOM?a.useLayoutEffect:a.useEffect;function o(e){const t=(0,a.useRef)(e);return r((()=>{t.current=e}),[e]),(0,a.useCallback)((function(){return 
t.current(...arguments)}),[])}function i(e){const t=(0,a.useRef)();return r((()=>{t.current=e})),t.current}class s extends Error{constructor(e,t){super(),this.name="ReactContextError",this.message=`Hook ${this.stack?.split("\n")[1]?.match(/at (?:\w+\.)?(?<name>\w+)/)?.groups.name??""} is called outside the <${e}>. ${t??""}`}}function l(e){const t=Object.entries(e);return t.sort(((e,t)=>e[0].localeCompare(t[0]))),(0,a.useMemo)((()=>e),t.flat())}function c(e){return t=>{let{children:n}=t;return a.createElement(a.Fragment,null,e.reduceRight(((e,t)=>a.createElement(t,null,e)),n))}}},8022:(e,t,n)=>{"use strict";function a(e,t){return void 0!==e&&void 0!==t&&new RegExp(e,"gi").test(t)}n.d(t,{F:()=>a})},8596:(e,t,n)=>{"use strict";n.d(t,{Mg:()=>i,Ns:()=>s});var a=n(7294),r=n(723),o=n(2263);function i(e,t){const n=e=>(!e||e.endsWith("/")?e:`${e}/`)?.toLowerCase();return n(e)===n(t)}function s(){const{baseUrl:e}=(0,o.Z)().siteConfig;return(0,a.useMemo)((()=>function(e){let{baseUrl:t,routes:n}=e;function a(e){return e.path===t&&!0===e.exact}function r(e){return e.path===t&&!e.exact}return function e(t){if(0===t.length)return;return t.find(a)||e(t.filter(r).flatMap((e=>e.routes??[])))}(n)}({routes:r.Z,baseUrl:e})),[e])}},2466:(e,t,n)=>{"use strict";n.d(t,{Ct:()=>f,OC:()=>l,RF:()=>d});var a=n(7294),r=n(412),o=n(2389),i=n(902);const s=a.createContext(void 0);function l(e){let{children:t}=e;const n=function(){const e=(0,a.useRef)(!0);return(0,a.useMemo)((()=>({scrollEventsEnabledRef:e,enableScrollEvents:()=>{e.current=!0},disableScrollEvents:()=>{e.current=!1}})),[])}();return a.createElement(s.Provider,{value:n},t)}function c(){const e=(0,a.useContext)(s);if(null==e)throw new i.i6("ScrollControllerProvider");return e}const u=()=>r.Z.canUseDOM?{scrollX:window.pageXOffset,scrollY:window.pageYOffset}:null;function d(e,t){void 0===t&&(t=[]);const{scrollEventsEnabledRef:n}=c(),r=(0,a.useRef)(u()),o=(0,i.zX)(e);(0,a.useEffect)((()=>{const e=()=>{if(!n.current)return;const 
e=u();o(e,r.current),r.current=e},t={passive:!0};return e(),window.addEventListener("scroll",e,t),()=>window.removeEventListener("scroll",e,t)}),[o,n,...t])}function f(){const e=(0,a.useRef)(null),t=(0,o.Z)()&&"smooth"===getComputedStyle(document.documentElement).scrollBehavior;return{startScroll:n=>{e.current=t?function(e){return window.scrollTo({top:e,behavior:"smooth"}),()=>{}}(n):function(e){let t=null;const n=document.documentElement.scrollTop>e;return function a(){const r=document.documentElement.scrollTop;(n&&r>e||!n&&r<e)&&(t=requestAnimationFrame(a),window.scrollTo(0,Math.floor(.85*(r-e))+e))}(),()=>t&&cancelAnimationFrame(t)}(n)},cancelScroll:()=>e.current?.()}}},3320:(e,t,n)=>{"use strict";n.d(t,{HX:()=>i,_q:()=>l,os:()=>s});var a=n(143),r=n(2263),o=n(373);const i="default";function s(e,t){return`docs-${e}-${t}`}function l(){const{i18n:e}=(0,r.Z)(),t=(0,a._r)(),n=(0,a.WS)(),l=(0,o.Oh)();const c=[i,...Object.keys(t).map((function(e){const a=n?.activePlugin.pluginId===e?n.activeVersion:void 0,r=l[e],o=t[e].versions.find((e=>e.isLast));return s(e,(a??r??o).name)}))];return{locale:e.currentLocale,tags:c}}},12:(e,t,n)=>{"use strict";n.d(t,{WA:()=>l});n(7294),n(1688);const a="localStorage";function r(e){let{key:t,oldValue:n,newValue:a,storage:r}=e;if(n===a)return;const o=document.createEvent("StorageEvent");o.initStorageEvent("storage",!1,!1,t,n,a,window.location.href,r),window.dispatchEvent(o)}function o(e){if(void 0===e&&(e=a),"undefined"==typeof window)throw new Error("Browser storage is not available on Node.js/Docusaurus SSR process.");if("none"===e)return null;try{return window[e]}catch(n){return t=n,i||(console.warn("Docusaurus browser storage is not available.\nPossible reasons: running Docusaurus in an iframe, in an incognito browser session, or using too strict browser privacy settings.",t),i=!0),null}var t}let i=!1;const s={get:()=>null,set:()=>{},del:()=>{},listen:()=>()=>{}};function l(e,t){if("undefined"==typeof window)return function(e){function 
t(){throw new Error(`Illegal storage API usage for storage key "${e}".\nDocusaurus storage APIs are not supposed to be called on the server-rendering process.\nPlease only call storage APIs in effects and event handlers.`)}return{get:t,set:t,del:t,listen:t}}(e);const n=o(t?.persistence);return null===n?s:{get:()=>{try{return n.getItem(e)}catch(t){return console.error(`Docusaurus storage error, can't get key=${e}`,t),null}},set:t=>{try{const a=n.getItem(e);n.setItem(e,t),r({key:e,oldValue:a,newValue:t,storage:n})}catch(a){console.error(`Docusaurus storage error, can't set ${e}=${t}`,a)}},del:()=>{try{const t=n.getItem(e);n.removeItem(e),r({key:e,oldValue:t,newValue:null,storage:n})}catch(t){console.error(`Docusaurus storage error, can't delete key=${e}`,t)}},listen:t=>{try{const a=a=>{a.storageArea===n&&a.key===e&&t(a)};return window.addEventListener("storage",a),()=>window.removeEventListener("storage",a)}catch(a){return console.error(`Docusaurus storage error, can't listen for changes of key=${e}`,a),()=>{}}}}}},4711:(e,t,n)=>{"use strict";n.d(t,{l:()=>o});var a=n(2263),r=n(6550);function o(){const{siteConfig:{baseUrl:e,url:t},i18n:{defaultLocale:n,currentLocale:o}}=(0,a.Z)(),{pathname:i}=(0,r.TH)(),s=o===n?e:e.replace(`/${o}/`,"/"),l=i.replace(e,"");return{createUrl:function(e){let{locale:a,fullyQualified:r}=e;return`${r?t:""}${function(e){return e===n?`${s}`:`${s}${e}/`}(a)}${l}`}}}},5936:(e,t,n)=>{"use strict";n.d(t,{S:()=>i});var a=n(7294),r=n(6550),o=n(902);function i(e){const t=(0,r.TH)(),n=(0,o.D9)(t),i=(0,o.zX)(e);(0,a.useEffect)((()=>{n&&t!==n&&i({location:t,previousLocation:n})}),[i,t,n])}},6668:(e,t,n)=>{"use strict";n.d(t,{L:()=>r});var a=n(2263);function r(){return(0,a.Z)().siteConfig.themeConfig}},6278:(e,t,n)=>{"use strict";n.d(t,{L:()=>r});var a=n(2263);function r(){const{siteConfig:{themeConfig:e}}=(0,a.Z)();return e}},239:(e,t,n)=>{"use strict";n.d(t,{l:()=>s});var a=n(7294),r=n(8022),o=n(4996),i=n(6278);function 
s(){const{withBaseUrl:e}=(0,o.C)(),{algolia:{externalUrlRegex:t,replaceSearchResultPathname:n}}=(0,i.L)();return(0,a.useCallback)((a=>{const o=new URL(a);if((0,r.F)(t,o.href))return a;const i=`${o.pathname+o.hash}`;return e(function(e,t){return t?e.replaceAll(new RegExp(t.from,"g"),t.to):e}(i,n))}),[e,t,n])}},8802:(e,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){const{trailingSlash:n,baseUrl:a}=t;if(e.startsWith("#"))return e;if(void 0===n)return e;const[r]=e.split(/[#?]/),o="/"===r||r===a?r:(i=r,n?function(e){return e.endsWith("/")?e:`${e}/`}(i):function(e){return e.endsWith("/")?e.slice(0,-1):e}(i));var i;return e.replace(r,o)}},4143:(e,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.getErrorCausalChain=void 0,t.getErrorCausalChain=function e(t){return t.cause?[t,...e(t.cause)]:[t]}},8780:function(e,t,n){"use strict";var a=this&&this.__importDefault||function(e){return e&&e.__esModule?e:{default:e}};Object.defineProperty(t,"__esModule",{value:!0}),t.getErrorCausalChain=t.applyTrailingSlash=t.blogPostContainerID=void 0,t.blogPostContainerID="post-content";var r=n(8802);Object.defineProperty(t,"applyTrailingSlash",{enumerable:!0,get:function(){return a(r).default}});var o=n(4143);Object.defineProperty(t,"getErrorCausalChain",{enumerable:!0,get:function(){return o.getErrorCausalChain}})},6010:(e,t,n)=>{"use strict";function a(e){var t,n,r="";if("string"==typeof e||"number"==typeof e)r+=e;else if("object"==typeof e)if(Array.isArray(e))for(t=0;t<e.length;t++)e[t]&&(n=a(e[t]))&&(r&&(r+=" "),r+=n);else for(t in e)e[t]&&(r&&(r+=" "),r+=t);return r}n.d(t,{Z:()=>r});const r=function(){for(var e,t,n=0,r="";n<arguments.length;)(e=arguments[n++])&&(t=a(e))&&(r&&(r+=" "),r+=t);return r}},9318:(e,t,n)=>{"use strict";n.d(t,{lX:()=>v,q_:()=>C,ob:()=>p,PP:()=>A,Ep:()=>f});var a=n(7462);function r(e){return"/"===e.charAt(0)}function o(e,t){for(var n=t,a=n+1,r=e.length;a<r;n+=1,a+=1)e[n]=e[a];e.pop()}const 
i=function(e,t){void 0===t&&(t="");var n,a=e&&e.split("/")||[],i=t&&t.split("/")||[],s=e&&r(e),l=t&&r(t),c=s||l;if(e&&r(e)?i=a:a.length&&(i.pop(),i=i.concat(a)),!i.length)return"/";if(i.length){var u=i[i.length-1];n="."===u||".."===u||""===u}else n=!1;for(var d=0,f=i.length;f>=0;f--){var p=i[f];"."===p?o(i,f):".."===p?(o(i,f),d++):d&&(o(i,f),d--)}if(!c)for(;d--;d)i.unshift("..");!c||""===i[0]||i[0]&&r(i[0])||i.unshift("");var m=i.join("/");return n&&"/"!==m.substr(-1)&&(m+="/"),m};var s=n(8776);function l(e){return"/"===e.charAt(0)?e:"/"+e}function c(e){return"/"===e.charAt(0)?e.substr(1):e}function u(e,t){return function(e,t){return 0===e.toLowerCase().indexOf(t.toLowerCase())&&-1!=="/?#".indexOf(e.charAt(t.length))}(e,t)?e.substr(t.length):e}function d(e){return"/"===e.charAt(e.length-1)?e.slice(0,-1):e}function f(e){var t=e.pathname,n=e.search,a=e.hash,r=t||"/";return n&&"?"!==n&&(r+="?"===n.charAt(0)?n:"?"+n),a&&"#"!==a&&(r+="#"===a.charAt(0)?a:"#"+a),r}function p(e,t,n,r){var o;"string"==typeof e?(o=function(e){var t=e||"/",n="",a="",r=t.indexOf("#");-1!==r&&(a=t.substr(r),t=t.substr(0,r));var o=t.indexOf("?");return-1!==o&&(n=t.substr(o),t=t.substr(0,o)),{pathname:t,search:"?"===n?"":n,hash:"#"===a?"":a}}(e),o.state=t):(void 0===(o=(0,a.Z)({},e)).pathname&&(o.pathname=""),o.search?"?"!==o.search.charAt(0)&&(o.search="?"+o.search):o.search="",o.hash?"#"!==o.hash.charAt(0)&&(o.hash="#"+o.hash):o.hash="",void 0!==t&&void 0===o.state&&(o.state=t));try{o.pathname=decodeURI(o.pathname)}catch(s){throw s instanceof URIError?new URIError('Pathname "'+o.pathname+'" could not be decoded. 
This is likely caused by an invalid percent-encoding.'):s}return n&&(o.key=n),r?o.pathname?"/"!==o.pathname.charAt(0)&&(o.pathname=i(o.pathname,r.pathname)):o.pathname=r.pathname:o.pathname||(o.pathname="/"),o}function m(){var e=null;var t=[];return{setPrompt:function(t){return e=t,function(){e===t&&(e=null)}},confirmTransitionTo:function(t,n,a,r){if(null!=e){var o="function"==typeof e?e(t,n):e;"string"==typeof o?"function"==typeof a?a(o,r):r(!0):r(!1!==o)}else r(!0)},appendListener:function(e){var n=!0;function a(){n&&e.apply(void 0,arguments)}return t.push(a),function(){n=!1,t=t.filter((function(e){return e!==a}))}},notifyListeners:function(){for(var e=arguments.length,n=new Array(e),a=0;a<e;a++)n[a]=arguments[a];t.forEach((function(e){return e.apply(void 0,n)}))}}}var h=!("undefined"==typeof window||!window.document||!window.document.createElement);function g(e,t){t(window.confirm(e))}var _="popstate",b="hashchange";function k(){try{return window.history.state||{}}catch(e){return{}}}function v(e){void 0===e&&(e={}),h||(0,s.Z)(!1);var t,n=window.history,r=(-1===(t=window.navigator.userAgent).indexOf("Android 2.")&&-1===t.indexOf("Android 4.0")||-1===t.indexOf("Mobile Safari")||-1!==t.indexOf("Chrome")||-1!==t.indexOf("Windows Phone"))&&window.history&&"pushState"in window.history,o=!(-1===window.navigator.userAgent.indexOf("Trident")),i=e,c=i.forceRefresh,v=void 0!==c&&c,y=i.getUserConfirmation,w=void 0===y?g:y,S=i.keyLength,E=void 0===S?6:S,x=e.basename?d(l(e.basename)):"";function C(e){var t=e||{},n=t.key,a=t.state,r=window.location,o=r.pathname+r.search+r.hash;return x&&(o=u(o,x)),p(o,a,n)}function T(){return Math.random().toString(36).substr(2,E)}var A=m();function G(e){(0,a.Z)(j,e),j.length=n.length,A.notifyListeners(j.location,j.action)}function L(e){(function(e){return void 0===e.state&&-1===navigator.userAgent.indexOf("CriOS")})(e)||P(C(e.state))}function N(){P(C(k()))}var R=!1;function 
P(e){if(R)R=!1,G();else{A.confirmTransitionTo(e,"POP",w,(function(t){t?G({action:"POP",location:e}):function(e){var t=j.location,n=O.indexOf(t.key);-1===n&&(n=0);var a=O.indexOf(e.key);-1===a&&(a=0);var r=n-a;r&&(R=!0,D(r))}(e)}))}}var I=C(k()),O=[I.key];function B(e){return x+f(e)}function D(e){n.go(e)}var M=0;function F(e){1===(M+=e)&&1===e?(window.addEventListener(_,L),o&&window.addEventListener(b,N)):0===M&&(window.removeEventListener(_,L),o&&window.removeEventListener(b,N))}var U=!1;var j={length:n.length,action:"POP",location:I,createHref:B,push:function(e,t){var a="PUSH",o=p(e,t,T(),j.location);A.confirmTransitionTo(o,a,w,(function(e){if(e){var t=B(o),i=o.key,s=o.state;if(r)if(n.pushState({key:i,state:s},null,t),v)window.location.href=t;else{var l=O.indexOf(j.location.key),c=O.slice(0,l+1);c.push(o.key),O=c,G({action:a,location:o})}else window.location.href=t}}))},replace:function(e,t){var a="REPLACE",o=p(e,t,T(),j.location);A.confirmTransitionTo(o,a,w,(function(e){if(e){var t=B(o),i=o.key,s=o.state;if(r)if(n.replaceState({key:i,state:s},null,t),v)window.location.replace(t);else{var l=O.indexOf(j.location.key);-1!==l&&(O[l]=o.key),G({action:a,location:o})}else window.location.replace(t)}}))},go:D,goBack:function(){D(-1)},goForward:function(){D(1)},block:function(e){void 0===e&&(e=!1);var t=A.setPrompt(e);return U||(F(1),U=!0),function(){return U&&(U=!1,F(-1)),t()}},listen:function(e){var t=A.appendListener(e);return F(1),function(){F(-1),t()}}};return j}var y="hashchange",w={hashbang:{encodePath:function(e){return"!"===e.charAt(0)?e:"!/"+c(e)},decodePath:function(e){return"!"===e.charAt(0)?e.substr(1):e}},noslash:{encodePath:c,decodePath:l},slash:{encodePath:l,decodePath:l}};function S(e){var t=e.indexOf("#");return-1===t?e:e.slice(0,t)}function E(){var e=window.location.href,t=e.indexOf("#");return-1===t?"":e.substring(t+1)}function x(e){window.location.replace(S(window.location.href)+"#"+e)}function C(e){void 0===e&&(e={}),h||(0,s.Z)(!1);var 
t=window.history,n=(window.navigator.userAgent.indexOf("Firefox"),e),r=n.getUserConfirmation,o=void 0===r?g:r,i=n.hashType,c=void 0===i?"slash":i,_=e.basename?d(l(e.basename)):"",b=w[c],k=b.encodePath,v=b.decodePath;function C(){var e=v(E());return _&&(e=u(e,_)),p(e)}var T=m();function A(e){(0,a.Z)(U,e),U.length=t.length,T.notifyListeners(U.location,U.action)}var G=!1,L=null;function N(){var e,t,n=E(),a=k(n);if(n!==a)x(a);else{var r=C(),i=U.location;if(!G&&(t=r,(e=i).pathname===t.pathname&&e.search===t.search&&e.hash===t.hash))return;if(L===f(r))return;L=null,function(e){if(G)G=!1,A();else{var t="POP";T.confirmTransitionTo(e,t,o,(function(n){n?A({action:t,location:e}):function(e){var t=U.location,n=O.lastIndexOf(f(t));-1===n&&(n=0);var a=O.lastIndexOf(f(e));-1===a&&(a=0);var r=n-a;r&&(G=!0,B(r))}(e)}))}}(r)}}var R=E(),P=k(R);R!==P&&x(P);var I=C(),O=[f(I)];function B(e){t.go(e)}var D=0;function M(e){1===(D+=e)&&1===e?window.addEventListener(y,N):0===D&&window.removeEventListener(y,N)}var F=!1;var U={length:t.length,action:"POP",location:I,createHref:function(e){var t=document.querySelector("base"),n="";return t&&t.getAttribute("href")&&(n=S(window.location.href)),n+"#"+k(_+f(e))},push:function(e,t){var n="PUSH",a=p(e,void 0,void 0,U.location);T.confirmTransitionTo(a,n,o,(function(e){if(e){var t=f(a),r=k(_+t);if(E()!==r){L=t,function(e){window.location.hash=e}(r);var o=O.lastIndexOf(f(U.location)),i=O.slice(0,o+1);i.push(t),O=i,A({action:n,location:a})}else A()}}))},replace:function(e,t){var n="REPLACE",a=p(e,void 0,void 0,U.location);T.confirmTransitionTo(a,n,o,(function(e){if(e){var t=f(a),r=k(_+t);E()!==r&&(L=t,x(r));var o=O.indexOf(f(U.location));-1!==o&&(O[o]=t),A({action:n,location:a})}}))},go:B,goBack:function(){B(-1)},goForward:function(){B(1)},block:function(e){void 0===e&&(e=!1);var t=T.setPrompt(e);return F||(M(1),F=!0),function(){return F&&(F=!1,M(-1)),t()}},listen:function(e){var t=T.appendListener(e);return M(1),function(){M(-1),t()}}};return U}function 
T(e,t,n){return Math.min(Math.max(e,t),n)}function A(e){void 0===e&&(e={});var t=e,n=t.getUserConfirmation,r=t.initialEntries,o=void 0===r?["/"]:r,i=t.initialIndex,s=void 0===i?0:i,l=t.keyLength,c=void 0===l?6:l,u=m();function d(e){(0,a.Z)(v,e),v.length=v.entries.length,u.notifyListeners(v.location,v.action)}function h(){return Math.random().toString(36).substr(2,c)}var g=T(s,0,o.length-1),_=o.map((function(e){return p(e,void 0,"string"==typeof e?h():e.key||h())})),b=f;function k(e){var t=T(v.index+e,0,v.entries.length-1),a=v.entries[t];u.confirmTransitionTo(a,"POP",n,(function(e){e?d({action:"POP",location:a,index:t}):d()}))}var v={length:_.length,action:"POP",location:_[g],index:g,entries:_,createHref:b,push:function(e,t){var a="PUSH",r=p(e,t,h(),v.location);u.confirmTransitionTo(r,a,n,(function(e){if(e){var t=v.index+1,n=v.entries.slice(0);n.length>t?n.splice(t,n.length-t,r):n.push(r),d({action:a,location:r,index:t,entries:n})}}))},replace:function(e,t){var a="REPLACE",r=p(e,t,h(),v.location);u.confirmTransitionTo(r,a,n,(function(e){e&&(v.entries[v.index]=r,d({action:a,location:r}))}))},go:k,goBack:function(){k(-1)},goForward:function(){k(1)},canGo:function(e){var t=v.index+e;return t>=0&&t<v.entries.length},block:function(e){return void 0===e&&(e=!1),u.setPrompt(e)},listen:function(e){return u.appendListener(e)}};return v}},8679:(e,t,n)=>{"use strict";var a=n(9864),r={childContextTypes:!0,contextType:!0,contextTypes:!0,defaultProps:!0,displayName:!0,getDefaultProps:!0,getDerivedStateFromError:!0,getDerivedStateFromProps:!0,mixins:!0,propTypes:!0,type:!0},o={name:!0,length:!0,prototype:!0,caller:!0,callee:!0,arguments:!0,arity:!0},i={$$typeof:!0,compare:!0,defaultProps:!0,displayName:!0,propTypes:!0,type:!0},s={};function l(e){return a.isMemo(e)?i:s[e.$$typeof]||r}s[a.ForwardRef]={$$typeof:!0,render:!0,defaultProps:!0,displayName:!0,propTypes:!0},s[a.Memo]=i;var 
c=Object.defineProperty,u=Object.getOwnPropertyNames,d=Object.getOwnPropertySymbols,f=Object.getOwnPropertyDescriptor,p=Object.getPrototypeOf,m=Object.prototype;e.exports=function e(t,n,a){if("string"!=typeof n){if(m){var r=p(n);r&&r!==m&&e(t,r,a)}var i=u(n);d&&(i=i.concat(d(n)));for(var s=l(t),h=l(n),g=0;g<i.length;++g){var _=i[g];if(!(o[_]||a&&a[_]||h&&h[_]||s&&s[_])){var b=f(n,_);try{c(t,_,b)}catch(k){}}}}return t}},1143:e=>{"use strict";e.exports=function(e,t,n,a,r,o,i,s){if(!e){var l;if(void 0===t)l=new Error("Minified exception occurred; use the non-minified dev environment for the full error message and additional helpful warnings.");else{var c=[n,a,r,o,i,s],u=0;(l=new Error(t.replace(/%s/g,(function(){return c[u++]})))).name="Invariant Violation"}throw l.framesToPop=1,l}}},5826:e=>{e.exports=Array.isArray||function(e){return"[object Array]"==Object.prototype.toString.call(e)}},2497:(e,t,n)=>{"use strict";n.r(t)},2295:(e,t,n)=>{"use strict";n.r(t)},4865:function(e,t,n){var a,r;a=function(){var e,t,n={version:"0.2.0"},a=n.settings={minimum:.08,easing:"ease",positionUsing:"",speed:200,trickle:!0,trickleRate:.02,trickleSpeed:800,showSpinner:!0,barSelector:'[role="bar"]',spinnerSelector:'[role="spinner"]',parent:"body",template:'<div class="bar" role="bar"><div class="peg"></div></div><div class="spinner" role="spinner"><div class="spinner-icon"></div></div>'};function r(e,t,n){return e<t?t:e>n?n:e}function o(e){return 100*(-1+e)}function i(e,t,n){var r;return(r="translate3d"===a.positionUsing?{transform:"translate3d("+o(e)+"%,0,0)"}:"translate"===a.positionUsing?{transform:"translate("+o(e)+"%,0)"}:{"margin-left":o(e)+"%"}).transition="all "+t+"ms "+n,r}n.configure=function(e){var t,n;for(t in e)void 0!==(n=e[t])&&e.hasOwnProperty(t)&&(a[t]=n);return this},n.status=null,n.set=function(e){var t=n.isStarted();e=r(e,a.minimum,1),n.status=1===e?null:e;var o=n.render(!t),c=o.querySelector(a.barSelector),u=a.speed,d=a.easing;return 
o.offsetWidth,s((function(t){""===a.positionUsing&&(a.positionUsing=n.getPositioningCSS()),l(c,i(e,u,d)),1===e?(l(o,{transition:"none",opacity:1}),o.offsetWidth,setTimeout((function(){l(o,{transition:"all "+u+"ms linear",opacity:0}),setTimeout((function(){n.remove(),t()}),u)}),u)):setTimeout(t,u)})),this},n.isStarted=function(){return"number"==typeof n.status},n.start=function(){n.status||n.set(0);var e=function(){setTimeout((function(){n.status&&(n.trickle(),e())}),a.trickleSpeed)};return a.trickle&&e(),this},n.done=function(e){return e||n.status?n.inc(.3+.5*Math.random()).set(1):this},n.inc=function(e){var t=n.status;return t?("number"!=typeof e&&(e=(1-t)*r(Math.random()*t,.1,.95)),t=r(t+e,0,.994),n.set(t)):n.start()},n.trickle=function(){return n.inc(Math.random()*a.trickleRate)},e=0,t=0,n.promise=function(a){return a&&"resolved"!==a.state()?(0===t&&n.start(),e++,t++,a.always((function(){0==--t?(e=0,n.done()):n.set((e-t)/e)})),this):this},n.render=function(e){if(n.isRendered())return document.getElementById("nprogress");u(document.documentElement,"nprogress-busy");var t=document.createElement("div");t.id="nprogress",t.innerHTML=a.template;var r,i=t.querySelector(a.barSelector),s=e?"-100":o(n.status||0),c=document.querySelector(a.parent);return l(i,{transition:"all 0 linear",transform:"translate3d("+s+"%,0,0)"}),a.showSpinner||(r=t.querySelector(a.spinnerSelector))&&p(r),c!=document.body&&u(c,"nprogress-custom-parent"),c.appendChild(t),t},n.remove=function(){d(document.documentElement,"nprogress-busy"),d(document.querySelector(a.parent),"nprogress-custom-parent");var e=document.getElementById("nprogress");e&&p(e)},n.isRendered=function(){return!!document.getElementById("nprogress")},n.getPositioningCSS=function(){var e=document.body.style,t="WebkitTransform"in e?"Webkit":"MozTransform"in e?"Moz":"msTransform"in e?"ms":"OTransform"in e?"O":"";return t+"Perspective"in e?"translate3d":t+"Transform"in e?"translate":"margin"};var s=function(){var e=[];function t(){var 
n=e.shift();n&&n(t)}return function(n){e.push(n),1==e.length&&t()}}(),l=function(){var e=["Webkit","O","Moz","ms"],t={};function n(e){return e.replace(/^-ms-/,"ms-").replace(/-([\da-z])/gi,(function(e,t){return t.toUpperCase()}))}function a(t){var n=document.body.style;if(t in n)return t;for(var a,r=e.length,o=t.charAt(0).toUpperCase()+t.slice(1);r--;)if((a=e[r]+o)in n)return a;return t}function r(e){return e=n(e),t[e]||(t[e]=a(e))}function o(e,t,n){t=r(t),e.style[t]=n}return function(e,t){var n,a,r=arguments;if(2==r.length)for(n in t)void 0!==(a=t[n])&&t.hasOwnProperty(n)&&o(e,n,a);else o(e,r[1],r[2])}}();function c(e,t){return("string"==typeof e?e:f(e)).indexOf(" "+t+" ")>=0}function u(e,t){var n=f(e),a=n+t;c(n,t)||(e.className=a.substring(1))}function d(e,t){var n,a=f(e);c(e,t)&&(n=a.replace(" "+t+" "," "),e.className=n.substring(1,n.length-1))}function f(e){return(" "+(e.className||"")+" ").replace(/\s+/gi," ")}function p(e){e&&e.parentNode&&e.parentNode.removeChild(e)}return n},void 0===(r="function"==typeof a?a.call(t,n,t,e):a)||(e.exports=r)},7418:e=>{"use strict";var t=Object.getOwnPropertySymbols,n=Object.prototype.hasOwnProperty,a=Object.prototype.propertyIsEnumerable;e.exports=function(){try{if(!Object.assign)return!1;var e=new String("abc");if(e[5]="de","5"===Object.getOwnPropertyNames(e)[0])return!1;for(var t={},n=0;n<10;n++)t["_"+String.fromCharCode(n)]=n;if("0123456789"!==Object.getOwnPropertyNames(t).map((function(e){return t[e]})).join(""))return!1;var a={};return"abcdefghijklmnopqrst".split("").forEach((function(e){a[e]=e})),"abcdefghijklmnopqrst"===Object.keys(Object.assign({},a)).join("")}catch(r){return!1}}()?Object.assign:function(e,r){for(var o,i,s=function(e){if(null==e)throw new TypeError("Object.assign cannot be called with null or undefined");return Object(e)}(e),l=1;l<arguments.length;l++){for(var c in o=Object(arguments[l]))n.call(o,c)&&(s[c]=o[c]);if(t){i=t(o);for(var u=0;u<i.length;u++)a.call(o,i[u])&&(s[i[u]]=o[i[u]])}}return 
s}},4779:(e,t,n)=>{var a=n(5826);e.exports=p,e.exports.parse=o,e.exports.compile=function(e,t){return s(o(e,t),t)},e.exports.tokensToFunction=s,e.exports.tokensToRegExp=f;var r=new RegExp(["(\\\\.)","([\\/.])?(?:(?:\\:(\\w+)(?:\\(((?:\\\\.|[^\\\\()])+)\\))?|\\(((?:\\\\.|[^\\\\()])+)\\))([+*?])?|(\\*))"].join("|"),"g");function o(e,t){for(var n,a=[],o=0,i=0,s="",u=t&&t.delimiter||"/";null!=(n=r.exec(e));){var d=n[0],f=n[1],p=n.index;if(s+=e.slice(i,p),i=p+d.length,f)s+=f[1];else{var m=e[i],h=n[2],g=n[3],_=n[4],b=n[5],k=n[6],v=n[7];s&&(a.push(s),s="");var y=null!=h&&null!=m&&m!==h,w="+"===k||"*"===k,S="?"===k||"*"===k,E=n[2]||u,x=_||b;a.push({name:g||o++,prefix:h||"",delimiter:E,optional:S,repeat:w,partial:y,asterisk:!!v,pattern:x?c(x):v?".*":"[^"+l(E)+"]+?"})}}return i<e.length&&(s+=e.substr(i)),s&&a.push(s),a}function i(e){return encodeURI(e).replace(/[\/?#]/g,(function(e){return"%"+e.charCodeAt(0).toString(16).toUpperCase()}))}function s(e,t){for(var n=new Array(e.length),r=0;r<e.length;r++)"object"==typeof e[r]&&(n[r]=new RegExp("^(?:"+e[r].pattern+")$",d(t)));return function(t,r){for(var o="",s=t||{},l=(r||{}).pretty?i:encodeURIComponent,c=0;c<e.length;c++){var u=e[c];if("string"!=typeof u){var d,f=s[u.name];if(null==f){if(u.optional){u.partial&&(o+=u.prefix);continue}throw new TypeError('Expected "'+u.name+'" to be defined')}if(a(f)){if(!u.repeat)throw new TypeError('Expected "'+u.name+'" to not repeat, but received `'+JSON.stringify(f)+"`");if(0===f.length){if(u.optional)continue;throw new TypeError('Expected "'+u.name+'" to not be empty')}for(var p=0;p<f.length;p++){if(d=l(f[p]),!n[c].test(d))throw new TypeError('Expected all "'+u.name+'" to match "'+u.pattern+'", but received `'+JSON.stringify(d)+"`");o+=(0===p?u.prefix:u.delimiter)+d}}else{if(d=u.asterisk?encodeURI(f).replace(/[?#]/g,(function(e){return"%"+e.charCodeAt(0).toString(16).toUpperCase()})):l(f),!n[c].test(d))throw new TypeError('Expected "'+u.name+'" to match "'+u.pattern+'", but received 
"'+d+'"');o+=u.prefix+d}}else o+=u}return o}}function l(e){return e.replace(/([.+*?=^!:${}()[\]|\/\\])/g,"\\$1")}function c(e){return e.replace(/([=!:$\/()])/g,"\\$1")}function u(e,t){return e.keys=t,e}function d(e){return e&&e.sensitive?"":"i"}function f(e,t,n){a(t)||(n=t||n,t=[]);for(var r=(n=n||{}).strict,o=!1!==n.end,i="",s=0;s<e.length;s++){var c=e[s];if("string"==typeof c)i+=l(c);else{var f=l(c.prefix),p="(?:"+c.pattern+")";t.push(c),c.repeat&&(p+="(?:"+f+p+")*"),i+=p=c.optional?c.partial?f+"("+p+")?":"(?:"+f+"("+p+"))?":f+"("+p+")"}}var m=l(n.delimiter||"/"),h=i.slice(-m.length)===m;return r||(i=(h?i.slice(0,-m.length):i)+"(?:"+m+"(?=$))?"),i+=o?"$":r&&h?"":"(?="+m+"|$)",u(new RegExp("^"+i,d(n)),t)}function p(e,t,n){return a(t)||(n=t||n,t=[]),n=n||{},e instanceof RegExp?function(e,t){var n=e.source.match(/\((?!\?)/g);if(n)for(var a=0;a<n.length;a++)t.push({name:a,prefix:null,delimiter:null,optional:!1,repeat:!1,partial:!1,asterisk:!1,pattern:null});return u(e,t)}(e,t):a(e)?function(e,t,n){for(var a=[],r=0;r<e.length;r++)a.push(p(e[r],t,n).source);return u(new RegExp("(?:"+a.join("|")+")",d(n)),t)}(e,t,n):function(e,t,n){return f(o(e,n),t,n)}(e,t,n)}},7410:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var a=function(){var e=/(?:^|\s)lang(?:uage)?-([\w-]+)(?=\s|$)/i,t=0,n={},a={util:{encode:function e(t){return t instanceof r?new r(t.type,e(t.content),t.alias):Array.isArray(t)?t.map(e):t.replace(/&/g,"&").replace(/</g,"<").replace(/\u00a0/g," ")},type:function(e){return Object.prototype.toString.call(e).slice(8,-1)},objId:function(e){return e.__id||Object.defineProperty(e,"__id",{value:++t}),e.__id},clone:function e(t,n){var r,o;switch(n=n||{},a.util.type(t)){case"Object":if(o=a.util.objId(t),n[o])return n[o];for(var i in r={},n[o]=r,t)t.hasOwnProperty(i)&&(r[i]=e(t[i],n));return r;case"Array":return o=a.util.objId(t),n[o]?n[o]:(r=[],n[o]=r,t.forEach((function(t,a){r[a]=e(t,n)})),r);default:return t}},getLanguage:function(t){for(;t;){var 
n=e.exec(t.className);if(n)return n[1].toLowerCase();t=t.parentElement}return"none"},setLanguage:function(t,n){t.className=t.className.replace(RegExp(e,"gi"),""),t.classList.add("language-"+n)},isActive:function(e,t,n){for(var a="no-"+t;e;){var r=e.classList;if(r.contains(t))return!0;if(r.contains(a))return!1;e=e.parentElement}return!!n}},languages:{plain:n,plaintext:n,text:n,txt:n,extend:function(e,t){var n=a.util.clone(a.languages[e]);for(var r in t)n[r]=t[r];return n},insertBefore:function(e,t,n,r){var o=(r=r||a.languages)[e],i={};for(var s in o)if(o.hasOwnProperty(s)){if(s==t)for(var l in n)n.hasOwnProperty(l)&&(i[l]=n[l]);n.hasOwnProperty(s)||(i[s]=o[s])}var c=r[e];return r[e]=i,a.languages.DFS(a.languages,(function(t,n){n===c&&t!=e&&(this[t]=i)})),i},DFS:function e(t,n,r,o){o=o||{};var i=a.util.objId;for(var s in t)if(t.hasOwnProperty(s)){n.call(t,s,t[s],r||s);var l=t[s],c=a.util.type(l);"Object"!==c||o[i(l)]?"Array"!==c||o[i(l)]||(o[i(l)]=!0,e(l,n,s,o)):(o[i(l)]=!0,e(l,n,null,o))}}},plugins:{},highlight:function(e,t,n){var o={code:e,grammar:t,language:n};return a.hooks.run("before-tokenize",o),o.tokens=a.tokenize(o.code,o.grammar),a.hooks.run("after-tokenize",o),r.stringify(a.util.encode(o.tokens),o.language)},tokenize:function(e,t){var n=t.rest;if(n){for(var a in n)t[a]=n[a];delete t.rest}var r=new s;return l(r,r.head,e),i(e,r,t,r.head,0),function(e){var t=[],n=e.head.next;for(;n!==e.tail;)t.push(n.value),n=n.next;return t}(r)},hooks:{all:{},add:function(e,t){var n=a.hooks.all;n[e]=n[e]||[],n[e].push(t)},run:function(e,t){var n=a.hooks.all[e];if(n&&n.length)for(var r,o=0;r=n[o++];)r(t)}},Token:r};function r(e,t,n,a){this.type=e,this.content=t,this.alias=n,this.length=0|(a||"").length}function o(e,t,n,a){e.lastIndex=t;var r=e.exec(n);if(r&&a&&r[1]){var o=r[1].length;r.index+=o,r[0]=r[0].slice(o)}return r}function i(e,t,n,s,u,d){for(var f in n)if(n.hasOwnProperty(f)&&n[f]){var p=n[f];p=Array.isArray(p)?p:[p];for(var 
m=0;m<p.length;++m){if(d&&d.cause==f+","+m)return;var h=p[m],g=h.inside,_=!!h.lookbehind,b=!!h.greedy,k=h.alias;if(b&&!h.pattern.global){var v=h.pattern.toString().match(/[imsuy]*$/)[0];h.pattern=RegExp(h.pattern.source,v+"g")}for(var y=h.pattern||h,w=s.next,S=u;w!==t.tail&&!(d&&S>=d.reach);S+=w.value.length,w=w.next){var E=w.value;if(t.length>e.length)return;if(!(E instanceof r)){var x,C=1;if(b){if(!(x=o(y,S,e,_))||x.index>=e.length)break;var T=x.index,A=x.index+x[0].length,G=S;for(G+=w.value.length;T>=G;)G+=(w=w.next).value.length;if(S=G-=w.value.length,w.value instanceof r)continue;for(var L=w;L!==t.tail&&(G<A||"string"==typeof L.value);L=L.next)C++,G+=L.value.length;C--,E=e.slice(S,G),x.index-=S}else if(!(x=o(y,0,E,_)))continue;T=x.index;var N=x[0],R=E.slice(0,T),P=E.slice(T+N.length),I=S+E.length;d&&I>d.reach&&(d.reach=I);var O=w.prev;if(R&&(O=l(t,O,R),S+=R.length),c(t,O,C),w=l(t,O,new r(f,g?a.tokenize(N,g):N,k,N)),P&&l(t,w,P),C>1){var B={cause:f+","+m,reach:I};i(e,t,n,w.prev,S,B),d&&B.reach>d.reach&&(d.reach=B.reach)}}}}}}function s(){var e={value:null,prev:null,next:null},t={value:null,prev:e,next:null};e.next=t,this.head=e,this.tail=t,this.length=0}function l(e,t,n){var a=t.next,r={value:n,prev:t,next:a};return t.next=r,a.prev=r,e.length++,r}function c(e,t,n){for(var a=t.next,r=0;r<n&&a!==e.tail;r++)a=a.next;t.next=a,a.prev=t,e.length-=r}return r.stringify=function e(t,n){if("string"==typeof t)return t;if(Array.isArray(t)){var r="";return t.forEach((function(t){r+=e(t,n)})),r}var o={type:t.type,content:e(t.content,n),tag:"span",classes:["token",t.type],attributes:{},language:n},i=t.alias;i&&(Array.isArray(i)?Array.prototype.push.apply(o.classes,i):o.classes.push(i)),a.hooks.run("wrap",o);var s="";for(var l in o.attributes)s+=" "+l+'="'+(o.attributes[l]||"").replace(/"/g,""")+'"';return"<"+o.tag+' class="'+o.classes.join(" 
")+'"'+s+">"+o.content+"</"+o.tag+">"},a}(),r=a;a.default=a,r.languages.markup={comment:{pattern:/<!--(?:(?!<!--)[\s\S])*?-->/,greedy:!0},prolog:{pattern:/<\?[\s\S]+?\?>/,greedy:!0},doctype:{pattern:/<!DOCTYPE(?:[^>"'[\]]|"[^"]*"|'[^']*')+(?:\[(?:[^<"'\]]|"[^"]*"|'[^']*'|<(?!!--)|<!--(?:[^-]|-(?!->))*-->)*\]\s*)?>/i,greedy:!0,inside:{"internal-subset":{pattern:/(^[^\[]*\[)[\s\S]+(?=\]>$)/,lookbehind:!0,greedy:!0,inside:null},string:{pattern:/"[^"]*"|'[^']*'/,greedy:!0},punctuation:/^<!|>$|[[\]]/,"doctype-tag":/^DOCTYPE/i,name:/[^\s<>'"]+/}},cdata:{pattern:/<!\[CDATA\[[\s\S]*?\]\]>/i,greedy:!0},tag:{pattern:/<\/?(?!\d)[^\s>\/=$<%]+(?:\s(?:\s*[^\s>\/=]+(?:\s*=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+(?=[\s>]))|(?=[\s/>])))+)?\s*\/?>/,greedy:!0,inside:{tag:{pattern:/^<\/?[^\s>\/]+/,inside:{punctuation:/^<\/?/,namespace:/^[^\s>\/:]+:/}},"special-attr":[],"attr-value":{pattern:/=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+)/,inside:{punctuation:[{pattern:/^=/,alias:"attr-equals"},/"|'/]}},punctuation:/\/?>/,"attr-name":{pattern:/[^\s>\/]+/,inside:{namespace:/^[^\s>\/:]+:/}}}},entity:[{pattern:/&[\da-z]{1,8};/i,alias:"named-entity"},/&#x?[\da-f]{1,8};/i]},r.languages.markup.tag.inside["attr-value"].inside.entity=r.languages.markup.entity,r.languages.markup.doctype.inside["internal-subset"].inside=r.languages.markup,r.hooks.add("wrap",(function(e){"entity"===e.type&&(e.attributes.title=e.content.replace(/&/,"&"))})),Object.defineProperty(r.languages.markup.tag,"addInlined",{value:function(e,t){var n={};n["language-"+t]={pattern:/(^<!\[CDATA\[)[\s\S]+?(?=\]\]>$)/i,lookbehind:!0,inside:r.languages[t]},n.cdata=/^<!\[CDATA\[|\]\]>$/i;var a={"included-cdata":{pattern:/<!\[CDATA\[[\s\S]*?\]\]>/i,inside:n}};a["language-"+t]={pattern:/[\s\S]+/,inside:r.languages[t]};var o={};o[e]={pattern:RegExp(/(<__[^>]*>)(?:<!\[CDATA\[(?:[^\]]|\](?!\]>))*\]\]>|(?!<!\[CDATA\[)[\s\S])*?(?=<\/__>)/.source.replace(/__/g,(function(){return 
e})),"i"),lookbehind:!0,greedy:!0,inside:a},r.languages.insertBefore("markup","cdata",o)}}),Object.defineProperty(r.languages.markup.tag,"addAttribute",{value:function(e,t){r.languages.markup.tag.inside["special-attr"].push({pattern:RegExp(/(^|["'\s])/.source+"(?:"+e+")"+/\s*=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+(?=[\s>]))/.source,"i"),lookbehind:!0,inside:{"attr-name":/^[^\s=]+/,"attr-value":{pattern:/=[\s\S]+/,inside:{value:{pattern:/(^=\s*(["']|(?!["'])))\S[\s\S]*(?=\2$)/,lookbehind:!0,alias:[t,"language-"+t],inside:r.languages[t]},punctuation:[{pattern:/^=/,alias:"attr-equals"},/"|'/]}}}})}}),r.languages.html=r.languages.markup,r.languages.mathml=r.languages.markup,r.languages.svg=r.languages.markup,r.languages.xml=r.languages.extend("markup",{}),r.languages.ssml=r.languages.xml,r.languages.atom=r.languages.xml,r.languages.rss=r.languages.xml,function(e){var t="\\b(?:BASH|BASHOPTS|BASH_ALIASES|BASH_ARGC|BASH_ARGV|BASH_CMDS|BASH_COMPLETION_COMPAT_DIR|BASH_LINENO|BASH_REMATCH|BASH_SOURCE|BASH_VERSINFO|BASH_VERSION|COLORTERM|COLUMNS|COMP_WORDBREAKS|DBUS_SESSION_BUS_ADDRESS|DEFAULTS_PATH|DESKTOP_SESSION|DIRSTACK|DISPLAY|EUID|GDMSESSION|GDM_LANG|GNOME_KEYRING_CONTROL|GNOME_KEYRING_PID|GPG_AGENT_INFO|GROUPS|HISTCONTROL|HISTFILE|HISTFILESIZE|HISTSIZE|HOME|HOSTNAME|HOSTTYPE|IFS|INSTANCE|JOB|LANG|LANGUAGE|LC_ADDRESS|LC_ALL|LC_IDENTIFICATION|LC_MEASUREMENT|LC_MONETARY|LC_NAME|LC_NUMERIC|LC_PAPER|LC_TELEPHONE|LC_TIME|LESSCLOSE|LESSOPEN|LINES|LOGNAME|LS_COLORS|MACHTYPE|MAILCHECK|MANDATORY_PATH|NO_AT_BRIDGE|OLDPWD|OPTERR|OPTIND|ORBIT_SOCKETDIR|OSTYPE|PAPERSIZE|PATH|PIPESTATUS|PPID|PS1|PS2|PS3|PS4|PWD|RANDOM|REPLY|SECONDS|SELINUX_INIT|SESSION|SESSIONTYPE|SESSION_MANAGER|SHELL|SHELLOPTS|SHLVL|SSH_AUTH_SOCK|TERM|UID|UPSTART_EVENTS|UPSTART_INSTANCE|UPSTART_JOB|UPSTART_SESSION|USER|WINDOWID|XAUTHORITY|XDG_CONFIG_DIRS|XDG_CURRENT_DESKTOP|XDG_DATA_DIRS|XDG_GREETER_DATA_DIR|XDG_MENU_PREFIX|XDG_RUNTIME_DIR|XDG_SEAT|XDG_SEAT_PATH|XDG_SESSION_DESKTOP|XDG_SESSION_ID|XDG_SESSION_PATH|XDG_SESS
ION_TYPE|XDG_VTNR|XMODIFIERS)\\b",n={pattern:/(^(["']?)\w+\2)[ \t]+\S.*/,lookbehind:!0,alias:"punctuation",inside:null},a={bash:n,environment:{pattern:RegExp("\\$"+t),alias:"constant"},variable:[{pattern:/\$?\(\([\s\S]+?\)\)/,greedy:!0,inside:{variable:[{pattern:/(^\$\(\([\s\S]+)\)\)/,lookbehind:!0},/^\$\(\(/],number:/\b0x[\dA-Fa-f]+\b|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:[Ee]-?\d+)?/,operator:/--|\+\+|\*\*=?|<<=?|>>=?|&&|\|\||[=!+\-*/%<>^&|]=?|[?~:]/,punctuation:/\(\(?|\)\)?|,|;/}},{pattern:/\$\((?:\([^)]+\)|[^()])+\)|`[^`]+`/,greedy:!0,inside:{variable:/^\$\(|^`|\)$|`$/}},{pattern:/\$\{[^}]+\}/,greedy:!0,inside:{operator:/:[-=?+]?|[!\/]|##?|%%?|\^\^?|,,?/,punctuation:/[\[\]]/,environment:{pattern:RegExp("(\\{)"+t),lookbehind:!0,alias:"constant"}}},/\$(?:\w+|[#?*!@$])/],entity:/\\(?:[abceEfnrtv\\"]|O?[0-7]{1,3}|U[0-9a-fA-F]{8}|u[0-9a-fA-F]{4}|x[0-9a-fA-F]{1,2})/};e.languages.bash={shebang:{pattern:/^#!\s*\/.*/,alias:"important"},comment:{pattern:/(^|[^"{\\$])#.*/,lookbehind:!0},"function-name":[{pattern:/(\bfunction\s+)[\w-]+(?=(?:\s*\(?:\s*\))?\s*\{)/,lookbehind:!0,alias:"function"},{pattern:/\b[\w-]+(?=\s*\(\s*\)\s*\{)/,alias:"function"}],"for-or-select":{pattern:/(\b(?:for|select)\s+)\w+(?=\s+in\s)/,alias:"variable",lookbehind:!0},"assign-left":{pattern:/(^|[\s;|&]|[<>]\()\w+(?=\+?=)/,inside:{environment:{pattern:RegExp("(^|[\\s;|&]|[<>]\\()"+t),lookbehind:!0,alias:"constant"}},alias:"variable",lookbehind:!0},string:[{pattern:/((?:^|[^<])<<-?\s*)(\w+)\s[\s\S]*?(?:\r?\n|\r)\2/,lookbehind:!0,greedy:!0,inside:a},{pattern:/((?:^|[^<])<<-?\s*)(["'])(\w+)\2\s[\s\S]*?(?:\r?\n|\r)\3/,lookbehind:!0,greedy:!0,inside:{bash:n}},{pattern:/(^|[^\\](?:\\\\)*)"(?:\\[\s\S]|\$\([^)]+\)|\$(?!\()|`[^`]+`|[^"\\`$])*"/,lookbehind:!0,greedy:!0,inside:a},{pattern:/(^|[^$\\])'[^']*'/,lookbehind:!0,greedy:!0},{pattern:/\$'(?:[^'\\]|\\[\s\S])*'/,greedy:!0,inside:{entity:a.entity}}],environment:{pattern:RegExp("\\$?"+t),alias:"constant"},variable:a.variable,function:{pattern:/(^|[\s;|&]|[<>]\()(?
:add|apropos|apt|apt-cache|apt-get|aptitude|aspell|automysqlbackup|awk|basename|bash|bc|bconsole|bg|bzip2|cal|cat|cfdisk|chgrp|chkconfig|chmod|chown|chroot|cksum|clear|cmp|column|comm|composer|cp|cron|crontab|csplit|curl|cut|date|dc|dd|ddrescue|debootstrap|df|diff|diff3|dig|dir|dircolors|dirname|dirs|dmesg|docker|docker-compose|du|egrep|eject|env|ethtool|expand|expect|expr|fdformat|fdisk|fg|fgrep|file|find|fmt|fold|format|free|fsck|ftp|fuser|gawk|git|gparted|grep|groupadd|groupdel|groupmod|groups|grub-mkconfig|gzip|halt|head|hg|history|host|hostname|htop|iconv|id|ifconfig|ifdown|ifup|import|install|ip|jobs|join|kill|killall|less|link|ln|locate|logname|logrotate|look|lpc|lpr|lprint|lprintd|lprintq|lprm|ls|lsof|lynx|make|man|mc|mdadm|mkconfig|mkdir|mke2fs|mkfifo|mkfs|mkisofs|mknod|mkswap|mmv|more|most|mount|mtools|mtr|mutt|mv|nano|nc|netstat|nice|nl|node|nohup|notify-send|npm|nslookup|op|open|parted|passwd|paste|pathchk|ping|pkill|pnpm|podman|podman-compose|popd|pr|printcap|printenv|ps|pushd|pv|quota|quotacheck|quotactl|ram|rar|rcp|reboot|remsync|rename|renice|rev|rm|rmdir|rpm|rsync|scp|screen|sdiff|sed|sendmail|seq|service|sftp|sh|shellcheck|shuf|shutdown|sleep|slocate|sort|split|ssh|stat|strace|su|sudo|sum|suspend|swapon|sync|tac|tail|tar|tee|time|timeout|top|touch|tr|traceroute|tsort|tty|umount|uname|unexpand|uniq|units|unrar|unshar|unzip|update-grub|uptime|useradd|userdel|usermod|users|uudecode|uuencode|v|vcpkg|vdir|vi|vim|virsh|vmstat|wait|watch|wc|wget|whereis|which|who|whoami|write|xargs|xdg-open|yarn|yes|zenity|zip|zsh|zypper)(?=$|[)\s;|&])/,lookbehind:!0},keyword:{pattern:/(^|[\s;|&]|[<>]\()(?:case|do|done|elif|else|esac|fi|for|function|if|in|select|then|until|while)(?=$|[)\s;|&])/,lookbehind:!0},builtin:{pattern:/(^|[\s;|&]|[<>]\()(?:\.|:|alias|bind|break|builtin|caller|cd|command|continue|declare|echo|enable|eval|exec|exit|export|getopts|hash|help|let|local|logout|mapfile|printf|pwd|read|readarray|readonly|return|set|shift|shopt|source|test|times|trap|type|
typeset|ulimit|umask|unalias|unset)(?=$|[)\s;|&])/,lookbehind:!0,alias:"class-name"},boolean:{pattern:/(^|[\s;|&]|[<>]\()(?:false|true)(?=$|[)\s;|&])/,lookbehind:!0},"file-descriptor":{pattern:/\B&\d\b/,alias:"important"},operator:{pattern:/\d?<>|>\||\+=|=[=~]?|!=?|<<[<-]?|[&\d]?>>|\d[<>]&?|[<>][&=]?|&[>&]?|\|[&|]?/,inside:{"file-descriptor":{pattern:/^\d/,alias:"important"}}},punctuation:/\$?\(\(?|\)\)?|\.\.|[{}[\];\\]/,number:{pattern:/(^|\s)(?:[1-9]\d*|0)(?:[.,]\d+)?\b/,lookbehind:!0}},n.inside=e.languages.bash;for(var r=["comment","function-name","for-or-select","assign-left","string","environment","function","keyword","builtin","boolean","file-descriptor","operator","punctuation","number"],o=a.variable[1].inside,i=0;i<r.length;i++)o[r[i]]=e.languages.bash[r[i]];e.languages.shell=e.languages.bash}(r),r.languages.clike={comment:[{pattern:/(^|[^\\])\/\*[\s\S]*?(?:\*\/|$)/,lookbehind:!0,greedy:!0},{pattern:/(^|[^\\:])\/\/.*/,lookbehind:!0,greedy:!0}],string:{pattern:/(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0},"class-name":{pattern:/(\b(?:class|extends|implements|instanceof|interface|new|trait)\s+|\bcatch\s+\()[\w.\\]+/i,lookbehind:!0,inside:{punctuation:/[.\\]/}},keyword:/\b(?:break|catch|continue|do|else|finally|for|function|if|in|instanceof|new|null|return|throw|try|while)\b/,boolean:/\b(?:false|true)\b/,function:/\b\w+(?=\()/,number:/\b0x[\da-f]+\b|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:e[+-]?\d+)?/i,operator:/[<>]=?|[!=]=?=?|--?|\+\+?|&&?|\|\|?|[?*/~^%]/,punctuation:/[{}[\];(),.:]/},r.languages.c=r.languages.extend("clike",{comment:{pattern:/\/\/(?:[^\r\n\\]|\\(?:\r\n?|\n|(?![\r\n])))*|\/\*[\s\S]*?(?:\*\/|$)/,greedy:!0},string:{pattern:/"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"/,greedy:!0},"class-name":{pattern:/(\b(?:enum|struct)\s+(?:__attribute__\s*\(\([\s\S]*?\)\)\s*)?)\w+|\b[a-z]\w*_t\b/,lookbehind:!0},keyword:/\b(?:_Alignas|_Alignof|_Atomic|_Bool|_Complex|_Generic|_Imaginary|_Noreturn|_Static_assert|_Thread_local|__attribute__|asm|auto|break|case|char|co
nst|continue|default|do|double|else|enum|extern|float|for|goto|if|inline|int|long|register|return|short|signed|sizeof|static|struct|switch|typedef|typeof|union|unsigned|void|volatile|while)\b/,function:/\b[a-z_]\w*(?=\s*\()/i,number:/(?:\b0x(?:[\da-f]+(?:\.[\da-f]*)?|\.[\da-f]+)(?:p[+-]?\d+)?|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:e[+-]?\d+)?)[ful]{0,4}/i,operator:/>>=?|<<=?|->|([-+&|:])\1|[?:~]|[-+*/%&|^!=<>]=?/}),r.languages.insertBefore("c","string",{char:{pattern:/'(?:\\(?:\r\n|[\s\S])|[^'\\\r\n]){0,32}'/,greedy:!0}}),r.languages.insertBefore("c","string",{macro:{pattern:/(^[\t ]*)#\s*[a-z](?:[^\r\n\\/]|\/(?!\*)|\/\*(?:[^*]|\*(?!\/))*\*\/|\\(?:\r\n|[\s\S]))*/im,lookbehind:!0,greedy:!0,alias:"property",inside:{string:[{pattern:/^(#\s*include\s*)<[^>]+>/,lookbehind:!0},r.languages.c.string],char:r.languages.c.char,comment:r.languages.c.comment,"macro-name":[{pattern:/(^#\s*define\s+)\w+\b(?!\()/i,lookbehind:!0},{pattern:/(^#\s*define\s+)\w+\b(?=\()/i,lookbehind:!0,alias:"function"}],directive:{pattern:/^(#\s*)[a-z]+/,lookbehind:!0,alias:"keyword"},"directive-hash":/^#/,punctuation:/##|\\(?=[\r\n])/,expression:{pattern:/\S[\s\S]*/,inside:r.languages.c}}}}),r.languages.insertBefore("c","function",{constant:/\b(?:EOF|NULL|SEEK_CUR|SEEK_END|SEEK_SET|__DATE__|__FILE__|__LINE__|__TIMESTAMP__|__TIME__|__func__|stderr|stdin|stdout)\b/}),delete r.languages.c.boolean,function(e){var 
t=/\b(?:alignas|alignof|asm|auto|bool|break|case|catch|char|char16_t|char32_t|char8_t|class|co_await|co_return|co_yield|compl|concept|const|const_cast|consteval|constexpr|constinit|continue|decltype|default|delete|do|double|dynamic_cast|else|enum|explicit|export|extern|final|float|for|friend|goto|if|import|inline|int|int16_t|int32_t|int64_t|int8_t|long|module|mutable|namespace|new|noexcept|nullptr|operator|override|private|protected|public|register|reinterpret_cast|requires|return|short|signed|sizeof|static|static_assert|static_cast|struct|switch|template|this|thread_local|throw|try|typedef|typeid|typename|uint16_t|uint32_t|uint64_t|uint8_t|union|unsigned|using|virtual|void|volatile|wchar_t|while)\b/,n=/\b(?!<keyword>)\w+(?:\s*\.\s*\w+)*\b/.source.replace(/<keyword>/g,(function(){return t.source}));e.languages.cpp=e.languages.extend("c",{"class-name":[{pattern:RegExp(/(\b(?:class|concept|enum|struct|typename)\s+)(?!<keyword>)\w+/.source.replace(/<keyword>/g,(function(){return t.source}))),lookbehind:!0},/\b[A-Z]\w*(?=\s*::\s*\w+\s*\()/,/\b[A-Z_]\w*(?=\s*::\s*~\w+\s*\()/i,/\b\w+(?=\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>\s*::\s*\w+\s*\()/],keyword:t,number:{pattern:/(?:\b0b[01']+|\b0x(?:[\da-f']+(?:\.[\da-f']*)?|\.[\da-f']+)(?:p[+-]?[\d']+)?|(?:\b[\d']+(?:\.[\d']*)?|\B\.[\d']+)(?:e[+-]?[\d']+)?)[ful]{0,4}/i,greedy:!0},operator:/>>=?|<<=?|->|--|\+\+|&&|\|\||[?:~]|<=>|[-+*/%&|^!=<>]=?|\b(?:and|and_eq|bitand|bitor|not|not_eq|or|or_eq|xor|xor_eq)\b/,boolean:/\b(?:false|true)\b/}),e.languages.insertBefore("cpp","string",{module:{pattern:RegExp(/(\b(?:import|module)\s+)/.source+"(?:"+/"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"|<[^<>\r\n]*>/.source+"|"+/<mod-name>(?:\s*:\s*<mod-name>)?|:\s*<mod-name>/.source.replace(/<mod-name>/g,(function(){return n}))+")"),lookbehind:!0,greedy:!0,inside:{string:/^[<"][\s\S]+/,operator:/:/,punctuation:/\./}},"raw-string":{pattern:/R"([^()\\ 
]{0,16})\([\s\S]*?\)\1"/,alias:"string",greedy:!0}}),e.languages.insertBefore("cpp","keyword",{"generic-function":{pattern:/\b(?!operator\b)[a-z_]\w*\s*<(?:[^<>]|<[^<>]*>)*>(?=\s*\()/i,inside:{function:/^\w+/,generic:{pattern:/<[\s\S]+/,alias:"class-name",inside:e.languages.cpp}}}}),e.languages.insertBefore("cpp","operator",{"double-colon":{pattern:/::/,alias:"punctuation"}}),e.languages.insertBefore("cpp","class-name",{"base-clause":{pattern:/(\b(?:class|struct)\s+\w+\s*:\s*)[^;{}"'\s]+(?:\s+[^;{}"'\s]+)*(?=\s*[;{])/,lookbehind:!0,greedy:!0,inside:e.languages.extend("cpp",{})}}),e.languages.insertBefore("inside","double-colon",{"class-name":/\b[a-z_]\w*\b(?!\s*::)/i},e.languages.cpp["base-clause"])}(r),function(e){var t=/(?:"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"|'(?:\\(?:\r\n|[\s\S])|[^'\\\r\n])*')/;e.languages.css={comment:/\/\*[\s\S]*?\*\//,atrule:{pattern:/@[\w-](?:[^;{\s]|\s+(?![\s{]))*(?:;|(?=\s*\{))/,inside:{rule:/^@[\w-]+/,"selector-function-argument":{pattern:/(\bselector\s*\(\s*(?![\s)]))(?:[^()\s]|\s+(?![\s)])|\((?:[^()]|\([^()]*\))*\))+(?=\s*\))/,lookbehind:!0,alias:"selector"},keyword:{pattern:/(^|[^\w-])(?:and|not|only|or)(?![\w-])/,lookbehind:!0}}},url:{pattern:RegExp("\\burl\\((?:"+t.source+"|"+/(?:[^\\\r\n()"']|\\[\s\S])*/.source+")\\)","i"),greedy:!0,inside:{function:/^url/i,punctuation:/^\(|\)$/,string:{pattern:RegExp("^"+t.source+"$"),alias:"url"}}},selector:{pattern:RegExp("(^|[{}\\s])[^{}\\s](?:[^{};\"'\\s]|\\s+(?![\\s{])|"+t.source+")*(?=\\s*\\{)"),lookbehind:!0},string:{pattern:t,greedy:!0},property:{pattern:/(^|[^-\w\xA0-\uFFFF])(?!\s)[-_a-z\xA0-\uFFFF](?:(?!\s)[-\w\xA0-\uFFFF])*(?=\s*:)/i,lookbehind:!0},important:/!important\b/i,function:{pattern:/(^|[^-a-z0-9])[-a-z0-9]+(?=\()/i,lookbehind:!0},punctuation:/[(){};:,]/},e.languages.css.atrule.inside.rest=e.languages.css;var n=e.languages.markup;n&&(n.tag.addInlined("style","css"),n.tag.addAttribute("style","css"))}(r),function(e){var 
t,n=/("|')(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/;e.languages.css.selector={pattern:e.languages.css.selector.pattern,lookbehind:!0,inside:t={"pseudo-element":/:(?:after|before|first-letter|first-line|selection)|::[-\w]+/,"pseudo-class":/:[-\w]+/,class:/\.[-\w]+/,id:/#[-\w]+/,attribute:{pattern:RegExp("\\[(?:[^[\\]\"']|"+n.source+")*\\]"),greedy:!0,inside:{punctuation:/^\[|\]$/,"case-sensitivity":{pattern:/(\s)[si]$/i,lookbehind:!0,alias:"keyword"},namespace:{pattern:/^(\s*)(?:(?!\s)[-*\w\xA0-\uFFFF])*\|(?!=)/,lookbehind:!0,inside:{punctuation:/\|$/}},"attr-name":{pattern:/^(\s*)(?:(?!\s)[-\w\xA0-\uFFFF])+/,lookbehind:!0},"attr-value":[n,{pattern:/(=\s*)(?:(?!\s)[-\w\xA0-\uFFFF])+(?=\s*$)/,lookbehind:!0}],operator:/[|~*^$]?=/}},"n-th":[{pattern:/(\(\s*)[+-]?\d*[\dn](?:\s*[+-]\s*\d+)?(?=\s*\))/,lookbehind:!0,inside:{number:/[\dn]+/,operator:/[+-]/}},{pattern:/(\(\s*)(?:even|odd)(?=\s*\))/i,lookbehind:!0}],combinator:/>|\+|~|\|\|/,punctuation:/[(),]/}},e.languages.css.atrule.inside["selector-function-argument"].inside=t,e.languages.insertBefore("css","property",{variable:{pattern:/(^|[^-\w\xA0-\uFFFF])--(?!\s)[-_a-z\xA0-\uFFFF](?:(?!\s)[-\w\xA0-\uFFFF])*/i,lookbehind:!0}});var 
a={pattern:/(\b\d+)(?:%|[a-z]+(?![\w-]))/,lookbehind:!0},r={pattern:/(^|[^\w.-])-?(?:\d+(?:\.\d+)?|\.\d+)/,lookbehind:!0};e.languages.insertBefore("css","function",{operator:{pattern:/(\s)[+\-*\/](?=\s)/,lookbehind:!0},hexcode:{pattern:/\B#[\da-f]{3,8}\b/i,alias:"color"},color:[{pattern:/(^|[^\w-])(?:AliceBlue|AntiqueWhite|Aqua|Aquamarine|Azure|Beige|Bisque|Black|BlanchedAlmond|Blue|BlueViolet|Brown|BurlyWood|CadetBlue|Chartreuse|Chocolate|Coral|CornflowerBlue|Cornsilk|Crimson|Cyan|DarkBlue|DarkCyan|DarkGoldenRod|DarkGr[ae]y|DarkGreen|DarkKhaki|DarkMagenta|DarkOliveGreen|DarkOrange|DarkOrchid|DarkRed|DarkSalmon|DarkSeaGreen|DarkSlateBlue|DarkSlateGr[ae]y|DarkTurquoise|DarkViolet|DeepPink|DeepSkyBlue|DimGr[ae]y|DodgerBlue|FireBrick|FloralWhite|ForestGreen|Fuchsia|Gainsboro|GhostWhite|Gold|GoldenRod|Gr[ae]y|Green|GreenYellow|HoneyDew|HotPink|IndianRed|Indigo|Ivory|Khaki|Lavender|LavenderBlush|LawnGreen|LemonChiffon|LightBlue|LightCoral|LightCyan|LightGoldenRodYellow|LightGr[ae]y|LightGreen|LightPink|LightSalmon|LightSeaGreen|LightSkyBlue|LightSlateGr[ae]y|LightSteelBlue|LightYellow|Lime|LimeGreen|Linen|Magenta|Maroon|MediumAquaMarine|MediumBlue|MediumOrchid|MediumPurple|MediumSeaGreen|MediumSlateBlue|MediumSpringGreen|MediumTurquoise|MediumVioletRed|MidnightBlue|MintCream|MistyRose|Moccasin|NavajoWhite|Navy|OldLace|Olive|OliveDrab|Orange|OrangeRed|Orchid|PaleGoldenRod|PaleGreen|PaleTurquoise|PaleVioletRed|PapayaWhip|PeachPuff|Peru|Pink|Plum|PowderBlue|Purple|Red|RosyBrown|RoyalBlue|SaddleBrown|Salmon|SandyBrown|SeaGreen|SeaShell|Sienna|Silver|SkyBlue|SlateBlue|SlateGr[ae]y|Snow|SpringGreen|SteelBlue|Tan|Teal|Thistle|Tomato|Transparent|Turquoise|Violet|Wheat|White|WhiteSmoke|Yellow|YellowGreen)(?![\w-])/i,lookbehind:!0},{pattern:/\b(?:hsl|rgb)\(\s*\d{1,3}\s*,\s*\d{1,3}%?\s*,\s*\d{1,3}%?\s*\)\B|\b(?:hsl|rgb)a\(\s*\d{1,3}\s*,\s*\d{1,3}%?\s*,\s*\d{1,3}%?\s*,\s*(?:0|0?\.\d+|1)\s*\)\B/i,inside:{unit:a,number:r,function:/[\w-]+(?=\()/,punctuation:/[(),]/}}],entity:/\\[\da-f]
{1,8}/i,unit:a,number:r})}(r),r.languages.javascript=r.languages.extend("clike",{"class-name":[r.languages.clike["class-name"],{pattern:/(^|[^$\w\xA0-\uFFFF])(?!\s)[_$A-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\.(?:constructor|prototype))/,lookbehind:!0}],keyword:[{pattern:/((?:^|\})\s*)catch\b/,lookbehind:!0},{pattern:/(^|[^.]|\.\.\.\s*)\b(?:as|assert(?=\s*\{)|async(?=\s*(?:function\b|\(|[$\w\xA0-\uFFFF]|$))|await|break|case|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally(?=\s*(?:\{|$))|for|from(?=\s*(?:['"]|$))|function|(?:get|set)(?=\s*(?:[#\[$\w\xA0-\uFFFF]|$))|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)\b/,lookbehind:!0}],function:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*(?:\.\s*(?:apply|bind|call)\s*)?\()/,number:{pattern:RegExp(/(^|[^\w$])/.source+"(?:"+/NaN|Infinity/.source+"|"+/0[bB][01]+(?:_[01]+)*n?/.source+"|"+/0[oO][0-7]+(?:_[0-7]+)*n?/.source+"|"+/0[xX][\dA-Fa-f]+(?:_[\dA-Fa-f]+)*n?/.source+"|"+/\d+(?:_\d+)*n/.source+"|"+/(?:\d+(?:_\d+)*(?:\.(?:\d+(?:_\d+)*)?)?|\.\d+(?:_\d+)*)(?:[Ee][+-]?\d+(?:_\d+)*)?/.source+")"+/(?![\w$])/.source),lookbehind:!0},operator:/--|\+\+|\*\*=?|=>|&&=?|\|\|=?|[!=]==|<<=?|>>>?=?|[-+*/%&|^!=<>]=?|\.{3}|\?\?=?|\?\.?|[~:]/}),r.languages.javascript["class-name"][0].pattern=/(\b(?:class|extends|implements|instanceof|interface|new)\s+)[\w.\\]+/,r.languages.insertBefore("javascript","keyword",{regex:{pattern:/((?:^|[^$\w\xA0-\uFFFF."'\])\s]|\b(?:return|yield))\s*)\/(?:\[(?:[^\]\\\r\n]|\\.)*\]|\\.|[^/\\\[\r\n])+\/[dgimyus]{0,7}(?=(?:\s|\/\*(?:[^*]|\*(?!\/))*\*\/)*(?:$|[\r\n,.;:})\]]|\/\/))/,lookbehind:!0,greedy:!0,inside:{"regex-source":{pattern:/^(\/)[\s\S]+(?=\/[a-z]*$)/,lookbehind:!0,alias:"language-regex",inside:r.languages.regex},"regex-delimiter":/^\/|\/$/,"regex-flags":/^[a-z]+$/}},"function-variable":{pattern:/#?(?!\s)[_$a-zA-Z\xA0-\uF
FFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*[=:]\s*(?:async\s*)?(?:\bfunction\b|(?:\((?:[^()]|\([^()]*\))*\)|(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)\s*=>))/,alias:"function"},parameter:[{pattern:/(function(?:\s+(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)?\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\))/,lookbehind:!0,inside:r.languages.javascript},{pattern:/(^|[^$\w\xA0-\uFFFF])(?!\s)[_$a-z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*=>)/i,lookbehind:!0,inside:r.languages.javascript},{pattern:/(\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*=>)/,lookbehind:!0,inside:r.languages.javascript},{pattern:/((?:\b|\s|^)(?!(?:as|async|await|break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)(?![$\w\xA0-\uFFFF]))(?:(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*\s*)\(\s*|\]\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*\{)/,lookbehind:!0,inside:r.languages.javascript}],constant:/\b[A-Z](?:[A-Z_]|\dx?)*\b/}),r.languages.insertBefore("javascript","string",{hashbang:{pattern:/^#!.*/,greedy:!0,alias:"comment"},"template-string":{pattern:/`(?:\\[\s\S]|\$\{(?:[^{}]|\{(?:[^{}]|\{[^}]*\})*\})+\}|(?!\$\{)[^\\`])*`/,greedy:!0,inside:{"template-punctuation":{pattern:/^`|`$/,alias:"string"},interpolation:{pattern:/((?:^|[^\\])(?:\\{2})*)\$\{(?:[^{}]|\{(?:[^{}]|\{[^}]*\})*\})+\}/,lookbehind:!0,inside:{"interpolation-punctuation":{pattern:/^\$\{|\}$/,alias:"punctuation"},rest:r.languages.javascript}},string:/[\s\S]+/}},"string-property":{pattern:/((?:^|[,{])[ \t]*)(["'])(?:\\(?:\r\n|[\s\S])|(?!\2)[^\\\r\n])*\2(?=\s*:)/m,lookbehind:!0,greedy:!0,alias:"property"}}),r.languages.insertBefore("javascript","operator",{"literal-property":{pattern:/((?:^|[,{])[ 
\t]*)(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*:)/m,lookbehind:!0,alias:"property"}}),r.languages.markup&&(r.languages.markup.tag.addInlined("script","javascript"),r.languages.markup.tag.addAttribute(/on(?:abort|blur|change|click|composition(?:end|start|update)|dblclick|error|focus(?:in|out)?|key(?:down|up)|load|mouse(?:down|enter|leave|move|out|over|up)|reset|resize|scroll|select|slotchange|submit|unload|wheel)/.source,"javascript")),r.languages.js=r.languages.javascript,function(e){var t=/#(?!\{).+/,n={pattern:/#\{[^}]+\}/,alias:"variable"};e.languages.coffeescript=e.languages.extend("javascript",{comment:t,string:[{pattern:/'(?:\\[\s\S]|[^\\'])*'/,greedy:!0},{pattern:/"(?:\\[\s\S]|[^\\"])*"/,greedy:!0,inside:{interpolation:n}}],keyword:/\b(?:and|break|by|catch|class|continue|debugger|delete|do|each|else|extend|extends|false|finally|for|if|in|instanceof|is|isnt|let|loop|namespace|new|no|not|null|of|off|on|or|own|return|super|switch|then|this|throw|true|try|typeof|undefined|unless|until|when|while|window|with|yes|yield)\b/,"class-member":{pattern:/@(?!\d)\w+/,alias:"variable"}}),e.languages.insertBefore("coffeescript","comment",{"multiline-comment":{pattern:/###[\s\S]+?###/,alias:"comment"},"block-regex":{pattern:/\/{3}[\s\S]*?\/{3}/,alias:"regex",inside:{comment:t,interpolation:n}}}),e.languages.insertBefore("coffeescript","string",{"inline-javascript":{pattern:/`(?:\\[\s\S]|[^\\`])*`/,inside:{delimiter:{pattern:/^`|`$/,alias:"punctuation"},script:{pattern:/[\s\S]+/,alias:"language-javascript",inside:e.languages.javascript}}},"multiline-string":[{pattern:/'''[\s\S]*?'''/,greedy:!0,alias:"string"},{pattern:/"""[\s\S]*?"""/,greedy:!0,alias:"string",inside:{interpolation:n}}]}),e.languages.insertBefore("coffeescript","keyword",{property:/(?!\d)\w+(?=\s*:(?!:))/}),delete e.languages.coffeescript["template-string"],e.languages.coffee=e.languages.coffeescript}(r),function(e){var 
t=/[*&][^\s[\]{},]+/,n=/!(?:<[\w\-%#;/?:@&=+$,.!~*'()[\]]+>|(?:[a-zA-Z\d-]*!)?[\w\-%#;/?:@&=+$.~*'()]+)?/,a="(?:"+n.source+"(?:[ \t]+"+t.source+")?|"+t.source+"(?:[ \t]+"+n.source+")?)",r=/(?:[^\s\x00-\x08\x0e-\x1f!"#%&'*,\-:>?@[\]`{|}\x7f-\x84\x86-\x9f\ud800-\udfff\ufffe\uffff]|[?:-]<PLAIN>)(?:[ \t]*(?:(?![#:])<PLAIN>|:<PLAIN>))*/.source.replace(/<PLAIN>/g,(function(){return/[^\s\x00-\x08\x0e-\x1f,[\]{}\x7f-\x84\x86-\x9f\ud800-\udfff\ufffe\uffff]/.source})),o=/"(?:[^"\\\r\n]|\\.)*"|'(?:[^'\\\r\n]|\\.)*'/.source;function i(e,t){t=(t||"").replace(/m/g,"")+"m";var n=/([:\-,[{]\s*(?:\s<<prop>>[ \t]+)?)(?:<<value>>)(?=[ \t]*(?:$|,|\]|\}|(?:[\r\n]\s*)?#))/.source.replace(/<<prop>>/g,(function(){return a})).replace(/<<value>>/g,(function(){return e}));return RegExp(n,t)}e.languages.yaml={scalar:{pattern:RegExp(/([\-:]\s*(?:\s<<prop>>[ \t]+)?[|>])[ \t]*(?:((?:\r?\n|\r)[ \t]+)\S[^\r\n]*(?:\2[^\r\n]+)*)/.source.replace(/<<prop>>/g,(function(){return a}))),lookbehind:!0,alias:"string"},comment:/#.*/,key:{pattern:RegExp(/((?:^|[:\-,[{\r\n?])[ \t]*(?:<<prop>>[ \t]+)?)<<key>>(?=\s*:\s)/.source.replace(/<<prop>>/g,(function(){return a})).replace(/<<key>>/g,(function(){return"(?:"+r+"|"+o+")"}))),lookbehind:!0,greedy:!0,alias:"atrule"},directive:{pattern:/(^[ \t]*)%.+/m,lookbehind:!0,alias:"important"},datetime:{pattern:i(/\d{4}-\d\d?-\d\d?(?:[tT]|[ \t]+)\d\d?:\d{2}:\d{2}(?:\.\d*)?(?:[ \t]*(?:Z|[-+]\d\d?(?::\d{2})?))?|\d{4}-\d{2}-\d{2}|\d\d?:\d{2}(?::\d{2}(?:\.\d*)?)?/.source),lookbehind:!0,alias:"number"},boolean:{pattern:i(/false|true/.source,"i"),lookbehind:!0,alias:"important"},null:{pattern:i(/null|~/.source,"i"),lookbehind:!0,alias:"important"},string:{pattern:i(o),lookbehind:!0,greedy:!0},number:{pattern:i(/[+-]?(?:0x[\da-f]+|0o[0-7]+|(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?|\.inf|\.nan)/.source,"i"),lookbehind:!0},tag:n,important:t,punctuation:/---|[:[\]{}\-,|>?]|\.\.\./},e.languages.yml=e.languages.yaml}(r),function(e){var 
t=/(?:\\.|[^\\\n\r]|(?:\n|\r\n?)(?![\r\n]))/.source;function n(e){return e=e.replace(/<inner>/g,(function(){return t})),RegExp(/((?:^|[^\\])(?:\\{2})*)/.source+"(?:"+e+")")}var a=/(?:\\.|``(?:[^`\r\n]|`(?!`))+``|`[^`\r\n]+`|[^\\|\r\n`])+/.source,r=/\|?__(?:\|__)+\|?(?:(?:\n|\r\n?)|(?![\s\S]))/.source.replace(/__/g,(function(){return a})),o=/\|?[ \t]*:?-{3,}:?[ \t]*(?:\|[ \t]*:?-{3,}:?[ \t]*)+\|?(?:\n|\r\n?)/.source;e.languages.markdown=e.languages.extend("markup",{}),e.languages.insertBefore("markdown","prolog",{"front-matter-block":{pattern:/(^(?:\s*[\r\n])?)---(?!.)[\s\S]*?[\r\n]---(?!.)/,lookbehind:!0,greedy:!0,inside:{punctuation:/^---|---$/,"front-matter":{pattern:/\S+(?:\s+\S+)*/,alias:["yaml","language-yaml"],inside:e.languages.yaml}}},blockquote:{pattern:/^>(?:[\t ]*>)*/m,alias:"punctuation"},table:{pattern:RegExp("^"+r+o+"(?:"+r+")*","m"),inside:{"table-data-rows":{pattern:RegExp("^("+r+o+")(?:"+r+")*$"),lookbehind:!0,inside:{"table-data":{pattern:RegExp(a),inside:e.languages.markdown},punctuation:/\|/}},"table-line":{pattern:RegExp("^("+r+")"+o+"$"),lookbehind:!0,inside:{punctuation:/\||:?-{3,}:?/}},"table-header-row":{pattern:RegExp("^"+r+"$"),inside:{"table-header":{pattern:RegExp(a),alias:"important",inside:e.languages.markdown},punctuation:/\|/}}}},code:[{pattern:/((?:^|\n)[ \t]*\n|(?:^|\r\n?)[ \t]*\r\n?)(?: {4}|\t).+(?:(?:\n|\r\n?)(?: {4}|\t).+)*/,lookbehind:!0,alias:"keyword"},{pattern:/^```[\s\S]*?^```$/m,greedy:!0,inside:{"code-block":{pattern:/^(```.*(?:\n|\r\n?))[\s\S]+?(?=(?:\n|\r\n?)^```$)/m,lookbehind:!0},"code-language":{pattern:/^(```).+/,lookbehind:!0},punctuation:/```/}}],title:[{pattern:/\S.*(?:\n|\r\n?)(?:==+|--+)(?=[ \t]*$)/m,alias:"important",inside:{punctuation:/==+$|--+$/}},{pattern:/(^\s*)#.+/m,lookbehind:!0,alias:"important",inside:{punctuation:/^#+|#+$/}}],hr:{pattern:/(^\s*)([*-])(?:[\t ]*\2){2,}(?=\s*$)/m,lookbehind:!0,alias:"punctuation"},list:{pattern:/(^\s*)(?:[*+-]|\d+\.)(?=[\t 
].)/m,lookbehind:!0,alias:"punctuation"},"url-reference":{pattern:/!?\[[^\]]+\]:[\t ]+(?:\S+|<(?:\\.|[^>\\])+>)(?:[\t ]+(?:"(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|\((?:\\.|[^)\\])*\)))?/,inside:{variable:{pattern:/^(!?\[)[^\]]+/,lookbehind:!0},string:/(?:"(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|\((?:\\.|[^)\\])*\))$/,punctuation:/^[\[\]!:]|[<>]/},alias:"url"},bold:{pattern:n(/\b__(?:(?!_)<inner>|_(?:(?!_)<inner>)+_)+__\b|\*\*(?:(?!\*)<inner>|\*(?:(?!\*)<inner>)+\*)+\*\*/.source),lookbehind:!0,greedy:!0,inside:{content:{pattern:/(^..)[\s\S]+(?=..$)/,lookbehind:!0,inside:{}},punctuation:/\*\*|__/}},italic:{pattern:n(/\b_(?:(?!_)<inner>|__(?:(?!_)<inner>)+__)+_\b|\*(?:(?!\*)<inner>|\*\*(?:(?!\*)<inner>)+\*\*)+\*/.source),lookbehind:!0,greedy:!0,inside:{content:{pattern:/(^.)[\s\S]+(?=.$)/,lookbehind:!0,inside:{}},punctuation:/[*_]/}},strike:{pattern:n(/(~~?)(?:(?!~)<inner>)+\2/.source),lookbehind:!0,greedy:!0,inside:{content:{pattern:/(^~~?)[\s\S]+(?=\1$)/,lookbehind:!0,inside:{}},punctuation:/~~?/}},"code-snippet":{pattern:/(^|[^\\`])(?:``[^`\r\n]+(?:`[^`\r\n]+)*``(?!`)|`[^`\r\n]+`(?!`))/,lookbehind:!0,greedy:!0,alias:["code","keyword"]},url:{pattern:n(/!?\[(?:(?!\])<inner>)+\](?:\([^\s)]+(?:[\t ]+"(?:\\.|[^"\\])*")?\)|[ \t]?\[(?:(?!\])<inner>)+\])/.source),lookbehind:!0,greedy:!0,inside:{operator:/^!/,content:{pattern:/(^\[)[^\]]+(?=\])/,lookbehind:!0,inside:{}},variable:{pattern:/(^\][ \t]?\[)[^\]]+(?=\]$)/,lookbehind:!0},url:{pattern:/(^\]\()[^\s)]+/,lookbehind:!0},string:{pattern:/(^[ \t]+)"(?:\\.|[^"\\])*"(?=\)$)/,lookbehind:!0}}}}),["url","bold","italic","strike"].forEach((function(t){["url","bold","italic","strike","code-snippet"].forEach((function(n){t!==n&&(e.languages.markdown[t].inside.content.inside[n]=e.languages.markdown[n])}))})),e.hooks.add("after-tokenize",(function(e){"markdown"!==e.language&&"md"!==e.language||function e(t){if(t&&"string"!=typeof t)for(var n=0,a=t.length;n<a;n++){var r=t[n];if("code"===r.type){var 
o=r.content[1],i=r.content[3];if(o&&i&&"code-language"===o.type&&"code-block"===i.type&&"string"==typeof o.content){var s=o.content.replace(/\b#/g,"sharp").replace(/\b\+\+/g,"pp"),l="language-"+(s=(/[a-z][\w-]*/i.exec(s)||[""])[0].toLowerCase());i.alias?"string"==typeof i.alias?i.alias=[i.alias,l]:i.alias.push(l):i.alias=[l]}}else e(r.content)}}(e.tokens)})),e.hooks.add("wrap",(function(t){if("code-block"===t.type){for(var n="",a=0,r=t.classes.length;a<r;a++){var o=t.classes[a],c=/language-(.+)/.exec(o);if(c){n=c[1];break}}var u,d=e.languages[n];if(d)t.content=e.highlight((u=t.content,u.replace(i,"").replace(/&(\w{1,8}|#x?[\da-f]{1,8});/gi,(function(e,t){var n;if("#"===(t=t.toLowerCase())[0])return n="x"===t[1]?parseInt(t.slice(2),16):Number(t.slice(1)),l(n);var a=s[t];return a||e}))),d,n);else if(n&&"none"!==n&&e.plugins.autoloader){var f="md-"+(new Date).valueOf()+"-"+Math.floor(1e16*Math.random());t.attributes.id=f,e.plugins.autoloader.loadLanguages(n,(function(){var t=document.getElementById(f);t&&(t.innerHTML=e.highlight(t.textContent,e.languages[n],n))}))}}}));var 
i=RegExp(e.languages.markup.tag.pattern.source,"gi"),s={amp:"&",lt:"<",gt:">",quot:'"'},l=String.fromCodePoint||String.fromCharCode;e.languages.md=e.languages.markdown}(r),r.languages.graphql={comment:/#.*/,description:{pattern:/(?:"""(?:[^"]|(?!""")")*"""|"(?:\\.|[^\\"\r\n])*")(?=\s*[a-z_])/i,greedy:!0,alias:"string",inside:{"language-markdown":{pattern:/(^"(?:"")?)(?!\1)[\s\S]+(?=\1$)/,lookbehind:!0,inside:r.languages.markdown}}},string:{pattern:/"""(?:[^"]|(?!""")")*"""|"(?:\\.|[^\\"\r\n])*"/,greedy:!0},number:/(?:\B-|\b)\d+(?:\.\d+)?(?:e[+-]?\d+)?\b/i,boolean:/\b(?:false|true)\b/,variable:/\$[a-z_]\w*/i,directive:{pattern:/@[a-z_]\w*/i,alias:"function"},"attr-name":{pattern:/\b[a-z_]\w*(?=\s*(?:\((?:[^()"]|"(?:\\.|[^\\"\r\n])*")*\))?:)/i,greedy:!0},"atom-input":{pattern:/\b[A-Z]\w*Input\b/,alias:"class-name"},scalar:/\b(?:Boolean|Float|ID|Int|String)\b/,constant:/\b[A-Z][A-Z_\d]*\b/,"class-name":{pattern:/(\b(?:enum|implements|interface|on|scalar|type|union)\s+|&\s*|:\s*|\[)[A-Z_]\w*/,lookbehind:!0},fragment:{pattern:/(\bfragment\s+|\.{3}\s*(?!on\b))[a-zA-Z_]\w*/,lookbehind:!0,alias:"function"},"definition-mutation":{pattern:/(\bmutation\s+)[a-zA-Z_]\w*/,lookbehind:!0,alias:"function"},"definition-query":{pattern:/(\bquery\s+)[a-zA-Z_]\w*/,lookbehind:!0,alias:"function"},keyword:/\b(?:directive|enum|extend|fragment|implements|input|interface|mutation|on|query|repeatable|scalar|schema|subscription|type|union)\b/,operator:/[!=|&]|\.{3}/,"property-query":/\w+(?=\s*\()/,object:/\w+(?=\s*\{)/,punctuation:/[!(){}\[\]:=,]/,property:/\w+/},r.hooks.add("after-tokenize",(function(e){if("graphql"===e.language)for(var t=e.tokens.filter((function(e){return"string"!=typeof e&&"comment"!==e.type&&"scalar"!==e.type})),n=0;n<t.length;){var a=t[n++];if("keyword"===a.type&&"mutation"===a.content){var r=[];if(d(["definition-mutation","punctuation"])&&"("===u(1).content){n+=2;var o=f(/^\($/,/^\)$/);if(-1===o)continue;for(;n<o;n++){var 
i=u(0);"variable"===i.type&&(p(i,"variable-input"),r.push(i.content))}n=o+1}if(d(["punctuation","property-query"])&&"{"===u(0).content&&(n++,p(u(0),"property-mutation"),r.length>0)){var s=f(/^\{$/,/^\}$/);if(-1===s)continue;for(var l=n;l<s;l++){var c=t[l];"variable"===c.type&&r.indexOf(c.content)>=0&&p(c,"variable-input")}}}}function u(e){return t[n+e]}function d(e,t){t=t||0;for(var n=0;n<e.length;n++){var a=u(n+t);if(!a||a.type!==e[n])return!1}return!0}function f(e,a){for(var r=1,o=n;o<t.length;o++){var i=t[o],s=i.content;if("punctuation"===i.type&&"string"==typeof s)if(e.test(s))r++;else if(a.test(s)&&0===--r)return o}return-1}function p(e,t){var n=e.alias;n?Array.isArray(n)||(e.alias=n=[n]):e.alias=n=[],n.push(t)}})),r.languages.sql={comment:{pattern:/(^|[^\\])(?:\/\*[\s\S]*?\*\/|(?:--|\/\/|#).*)/,lookbehind:!0},variable:[{pattern:/@(["'`])(?:\\[\s\S]|(?!\1)[^\\])+\1/,greedy:!0},/@[\w.$]+/],string:{pattern:/(^|[^@\\])("|')(?:\\[\s\S]|(?!\2)[^\\]|\2\2)*\2/,greedy:!0,lookbehind:!0},identifier:{pattern:/(^|[^@\\])`(?:\\[\s\S]|[^`\\]|``)*`/,greedy:!0,lookbehind:!0,inside:{punctuation:/^`|`$/}},function:/\b(?:AVG|COUNT|FIRST|FORMAT|LAST|LCASE|LEN|MAX|MID|MIN|MOD|NOW|ROUND|SUM|UCASE)(?=\s*\()/i,keyword:/\b(?:ACTION|ADD|AFTER|ALGORITHM|ALL|ALTER|ANALYZE|ANY|APPLY|AS|ASC|AUTHORIZATION|AUTO_INCREMENT|BACKUP|BDB|BEGIN|BERKELEYDB|BIGINT|BINARY|BIT|BLOB|BOOL|BOOLEAN|BREAK|BROWSE|BTREE|BULK|BY|CALL|CASCADED?|CASE|CHAIN|CHAR(?:ACTER|SET)?|CHECK(?:POINT)?|CLOSE|CLUSTERED|COALESCE|COLLATE|COLUMNS?|COMMENT|COMMIT(?:TED)?|COMPUTE|CONNECT|CONSISTENT|CONSTRAINT|CONTAINS(?:TABLE)?|CONTINUE|CONVERT|CREATE|CROSS|CURRENT(?:_DATE|_TIME|_TIMESTAMP|_USER)?|CURSOR|CYCLE|DATA(?:BASES?)?|DATE(?:TIME)?|DAY|DBCC|DEALLOCATE|DEC|DECIMAL|DECLARE|DEFAULT|DEFINER|DELAYED|DELETE|DELIMITERS?|DENY|DESC|DESCRIBE|DETERMINISTIC|DISABLE|DISCARD|DISK|DISTINCT|DISTINCTROW|DISTRIBUTED|DO|DOUBLE|DROP|DUMMY|DUMP(?:FILE)?|DUPLICATE|ELSE(?:IF)?|ENABLE|ENCLOSED|END|ENGINE|ENUM|ERRLVL|ERRORS|ESCAPED?|EXCEPT|EXEC(?:
UTE)?|EXISTS|EXIT|EXPLAIN|EXTENDED|FETCH|FIELDS|FILE|FILLFACTOR|FIRST|FIXED|FLOAT|FOLLOWING|FOR(?: EACH ROW)?|FORCE|FOREIGN|FREETEXT(?:TABLE)?|FROM|FULL|FUNCTION|GEOMETRY(?:COLLECTION)?|GLOBAL|GOTO|GRANT|GROUP|HANDLER|HASH|HAVING|HOLDLOCK|HOUR|IDENTITY(?:COL|_INSERT)?|IF|IGNORE|IMPORT|INDEX|INFILE|INNER|INNODB|INOUT|INSERT|INT|INTEGER|INTERSECT|INTERVAL|INTO|INVOKER|ISOLATION|ITERATE|JOIN|KEYS?|KILL|LANGUAGE|LAST|LEAVE|LEFT|LEVEL|LIMIT|LINENO|LINES|LINESTRING|LOAD|LOCAL|LOCK|LONG(?:BLOB|TEXT)|LOOP|MATCH(?:ED)?|MEDIUM(?:BLOB|INT|TEXT)|MERGE|MIDDLEINT|MINUTE|MODE|MODIFIES|MODIFY|MONTH|MULTI(?:LINESTRING|POINT|POLYGON)|NATIONAL|NATURAL|NCHAR|NEXT|NO|NONCLUSTERED|NULLIF|NUMERIC|OFF?|OFFSETS?|ON|OPEN(?:DATASOURCE|QUERY|ROWSET)?|OPTIMIZE|OPTION(?:ALLY)?|ORDER|OUT(?:ER|FILE)?|OVER|PARTIAL|PARTITION|PERCENT|PIVOT|PLAN|POINT|POLYGON|PRECEDING|PRECISION|PREPARE|PREV|PRIMARY|PRINT|PRIVILEGES|PROC(?:EDURE)?|PUBLIC|PURGE|QUICK|RAISERROR|READS?|REAL|RECONFIGURE|REFERENCES|RELEASE|RENAME|REPEAT(?:ABLE)?|REPLACE|REPLICATION|REQUIRE|RESIGNAL|RESTORE|RESTRICT|RETURN(?:ING|S)?|REVOKE|RIGHT|ROLLBACK|ROUTINE|ROW(?:COUNT|GUIDCOL|S)?|RTREE|RULE|SAVE(?:POINT)?|SCHEMA|SECOND|SELECT|SERIAL(?:IZABLE)?|SESSION(?:_USER)?|SET(?:USER)?|SHARE|SHOW|SHUTDOWN|SIMPLE|SMALLINT|SNAPSHOT|SOME|SONAME|SQL|START(?:ING)?|STATISTICS|STATUS|STRIPED|SYSTEM_USER|TABLES?|TABLESPACE|TEMP(?:ORARY|TABLE)?|TERMINATED|TEXT(?:SIZE)?|THEN|TIME(?:STAMP)?|TINY(?:BLOB|INT|TEXT)|TOP?|TRAN(?:SACTIONS?)?|TRIGGER|TRUNCATE|TSEQUAL|TYPES?|UNBOUNDED|UNCOMMITTED|UNDEFINED|UNION|UNIQUE|UNLOCK|UNPIVOT|UNSIGNED|UPDATE(?:TEXT)?|USAGE|USE|USER|USING|VALUES?|VAR(?:BINARY|CHAR|CHARACTER|YING)|VIEW|WAITFOR|WARNINGS|WHEN|WHERE|WHILE|WITH(?: ROLLUP|IN)?|WORK|WRITE(?:TEXT)?|YEAR)\b/i,boolean:/\b(?:FALSE|NULL|TRUE)\b/i,number:/\b0x[\da-f]+\b|\b\d+(?:\.\d*)?|\B\.\d+\b/i,operator:/[-+*\/=%^~]|&&?|\|\|?|!=?|<(?:=>?|<|>)?|>[>=]?|\b(?:AND|BETWEEN|DIV|ILIKE|IN|IS|LIKE|NOT|OR|REGEXP|RLIKE|SOUNDS 
LIKE|XOR)\b/i,punctuation:/[;[\]()`,.]/},function(e){var t=e.languages.javascript["template-string"],n=t.pattern.source,a=t.inside.interpolation,r=a.inside["interpolation-punctuation"],o=a.pattern.source;function i(t,a){if(e.languages[t])return{pattern:RegExp("((?:"+a+")\\s*)"+n),lookbehind:!0,greedy:!0,inside:{"template-punctuation":{pattern:/^`|`$/,alias:"string"},"embedded-code":{pattern:/[\s\S]+/,alias:t}}}}function s(e,t){return"___"+t.toUpperCase()+"_"+e+"___"}function l(t,n,a){var r={code:t,grammar:n,language:a};return e.hooks.run("before-tokenize",r),r.tokens=e.tokenize(r.code,r.grammar),e.hooks.run("after-tokenize",r),r.tokens}function c(t){var n={};n["interpolation-punctuation"]=r;var o=e.tokenize(t,n);if(3===o.length){var i=[1,1];i.push.apply(i,l(o[1],e.languages.javascript,"javascript")),o.splice.apply(o,i)}return new e.Token("interpolation",o,a.alias,t)}function u(t,n,a){var r=e.tokenize(t,{interpolation:{pattern:RegExp(o),lookbehind:!0}}),i=0,u={},d=l(r.map((function(e){if("string"==typeof e)return e;for(var n,r=e.content;-1!==t.indexOf(n=s(i++,a)););return u[n]=r,n})).join(""),n,a),f=Object.keys(u);return i=0,function e(t){for(var n=0;n<t.length;n++){if(i>=f.length)return;var a=t[n];if("string"==typeof a||"string"==typeof a.content){var r=f[i],o="string"==typeof a?a:a.content,s=o.indexOf(r);if(-1!==s){++i;var l=o.substring(0,s),d=c(u[r]),p=o.substring(s+r.length),m=[];if(l&&m.push(l),m.push(d),p){var h=[p];e(h),m.push.apply(m,h)}"string"==typeof a?(t.splice.apply(t,[n,1].concat(m)),n+=m.length-1):a.content=m}}else{var g=a.content;Array.isArray(g)?e(g):e([g])}}}(d),new 
e.Token(a,d,"language-"+a,t)}e.languages.javascript["template-string"]=[i("css",/\b(?:styled(?:\([^)]*\))?(?:\s*\.\s*\w+(?:\([^)]*\))*)*|css(?:\s*\.\s*(?:global|resolve))?|createGlobalStyle|keyframes)/.source),i("html",/\bhtml|\.\s*(?:inner|outer)HTML\s*\+?=/.source),i("svg",/\bsvg/.source),i("markdown",/\b(?:markdown|md)/.source),i("graphql",/\b(?:gql|graphql(?:\s*\.\s*experimental)?)/.source),i("sql",/\bsql/.source),t].filter(Boolean);var d={javascript:!0,js:!0,typescript:!0,ts:!0,jsx:!0,tsx:!0};function f(e){return"string"==typeof e?e:Array.isArray(e)?e.map(f).join(""):f(e.content)}e.hooks.add("after-tokenize",(function(t){t.language in d&&function t(n){for(var a=0,r=n.length;a<r;a++){var o=n[a];if("string"!=typeof o){var i=o.content;if(Array.isArray(i))if("template-string"===o.type){var s=i[1];if(3===i.length&&"string"!=typeof s&&"embedded-code"===s.type){var l=f(s),c=s.alias,d=Array.isArray(c)?c[0]:c,p=e.languages[d];if(!p)continue;i[1]=u(l,p,d)}}else t(i);else"string"!=typeof i&&t([i])}}}(t.tokens)}))}(r),function(e){e.languages.typescript=e.languages.extend("javascript",{"class-name":{pattern:/(\b(?:class|extends|implements|instanceof|interface|new|type)\s+)(?!keyof\b)(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?:\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>)?/,lookbehind:!0,greedy:!0,inside:null},builtin:/\b(?:Array|Function|Promise|any|boolean|console|never|number|string|symbol|unknown)\b/}),e.languages.typescript.keyword.push(/\b(?:abstract|declare|is|keyof|readonly|require)\b/,/\b(?:asserts|infer|interface|module|namespace|type)\b(?=\s*(?:[{_$a-zA-Z\xA0-\uFFFF]|$))/,/\btype\b(?=\s*(?:[\{*]|$))/),delete e.languages.typescript.parameter,delete e.languages.typescript["literal-property"];var t=e.languages.extend("typescript",{});delete 
t["class-name"],e.languages.typescript["class-name"].inside=t,e.languages.insertBefore("typescript","function",{decorator:{pattern:/@[$\w\xA0-\uFFFF]+/,inside:{at:{pattern:/^@/,alias:"operator"},function:/^[\s\S]+/}},"generic-function":{pattern:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>(?=\s*\()/,greedy:!0,inside:{function:/^#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*/,generic:{pattern:/<[\s\S]+/,alias:"class-name",inside:t}}}}),e.languages.ts=e.languages.typescript}(r),function(e){function t(e,t){return RegExp(e.replace(/<ID>/g,(function(){return/(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*/.source})),t)}e.languages.insertBefore("javascript","function-variable",{"method-variable":{pattern:RegExp("(\\.\\s*)"+e.languages.javascript["function-variable"].pattern.source),lookbehind:!0,alias:["function-variable","method","function","property-access"]}}),e.languages.insertBefore("javascript","function",{method:{pattern:RegExp("(\\.\\s*)"+e.languages.javascript.function.source),lookbehind:!0,alias:["function","property-access"]}}),e.languages.insertBefore("javascript","constant",{"known-class-name":[{pattern:/\b(?:(?:Float(?:32|64)|(?:Int|Uint)(?:8|16|32)|Uint8Clamped)?Array|ArrayBuffer|BigInt|Boolean|DataView|Date|Error|Function|Intl|JSON|(?:Weak)?(?:Map|Set)|Math|Number|Object|Promise|Proxy|Reflect|RegExp|String|Symbol|WebAssembly)\b/,alias:"class-name"},{pattern:/\b(?:[A-Z]\w*)Error\b/,alias:"class-name"}]}),e.languages.insertBefore("javascript","keyword",{imports:{pattern:t(/(\bimport\b\s*)(?:<ID>(?:\s*,\s*(?:\*\s*as\s+<ID>|\{[^{}]*\}))?|\*\s*as\s+<ID>|\{[^{}]*\})(?=\s*\bfrom\b)/.source),lookbehind:!0,inside:e.languages.javascript},exports:{pattern:t(/(\bexport\b\s*)(?:\*(?:\s*as\s+<ID>)?(?=\s*\bfrom\b)|\{[^{}]*\})/.source),lookbehind:!0,inside:e.languages.javascript}}),e.languages.javascript.keyword.unshift({pattern:/\b(?:as|default|export|from|import)\b/,alias:"module"},{pattern:/\b(?:await|br
eak|catch|continue|do|else|finally|for|if|return|switch|throw|try|while|yield)\b/,alias:"control-flow"},{pattern:/\bnull\b/,alias:["null","nil"]},{pattern:/\bundefined\b/,alias:"nil"}),e.languages.insertBefore("javascript","operator",{spread:{pattern:/\.{3}/,alias:"operator"},arrow:{pattern:/=>/,alias:"operator"}}),e.languages.insertBefore("javascript","punctuation",{"property-access":{pattern:t(/(\.\s*)#?<ID>/.source),lookbehind:!0},"maybe-class-name":{pattern:/(^|[^$\w\xA0-\uFFFF])[A-Z][$\w\xA0-\uFFFF]+/,lookbehind:!0},dom:{pattern:/\b(?:document|(?:local|session)Storage|location|navigator|performance|window)\b/,alias:"variable"},console:{pattern:/\bconsole(?=\s*\.)/,alias:"class-name"}});for(var n=["function","function-variable","method","method-variable","property-access"],a=0;a<n.length;a++){var r=n[a],o=e.languages.javascript[r];"RegExp"===e.util.type(o)&&(o=e.languages.javascript[r]={pattern:o});var i=o.inside||{};o.inside=i,i["maybe-class-name"]=/^[A-Z][\s\S]*/}}(r),function(e){var t=e.util.clone(e.languages.javascript),n=/(?:\s|\/\/.*(?!.)|\/\*(?:[^*]|\*(?!\/))\*\/)/.source,a=/(?:\{(?:\{(?:\{[^{}]*\}|[^{}])*\}|[^{}])*\})/.source,r=/(?:\{<S>*\.{3}(?:[^{}]|<BRACES>)*\})/.source;function o(e,t){return e=e.replace(/<S>/g,(function(){return n})).replace(/<BRACES>/g,(function(){return a})).replace(/<SPREAD>/g,(function(){return 
r})),RegExp(e,t)}r=o(r).source,e.languages.jsx=e.languages.extend("markup",t),e.languages.jsx.tag.pattern=o(/<\/?(?:[\w.:-]+(?:<S>+(?:[\w.:$-]+(?:=(?:"(?:\\[\s\S]|[^\\"])*"|'(?:\\[\s\S]|[^\\'])*'|[^\s{'"/>=]+|<BRACES>))?|<SPREAD>))*<S>*\/?)?>/.source),e.languages.jsx.tag.inside.tag.pattern=/^<\/?[^\s>\/]*/,e.languages.jsx.tag.inside["attr-value"].pattern=/=(?!\{)(?:"(?:\\[\s\S]|[^\\"])*"|'(?:\\[\s\S]|[^\\'])*'|[^\s'">]+)/,e.languages.jsx.tag.inside.tag.inside["class-name"]=/^[A-Z]\w*(?:\.[A-Z]\w*)*$/,e.languages.jsx.tag.inside.comment=t.comment,e.languages.insertBefore("inside","attr-name",{spread:{pattern:o(/<SPREAD>/.source),inside:e.languages.jsx}},e.languages.jsx.tag),e.languages.insertBefore("inside","special-attr",{script:{pattern:o(/=<BRACES>/.source),alias:"language-javascript",inside:{"script-punctuation":{pattern:/^=(?=\{)/,alias:"punctuation"},rest:e.languages.jsx}}},e.languages.jsx.tag);var i=function(e){return e?"string"==typeof e?e:"string"==typeof e.content?e.content:e.content.map(i).join(""):""},s=function(t){for(var n=[],a=0;a<t.length;a++){var r=t[a],o=!1;if("string"!=typeof r&&("tag"===r.type&&r.content[0]&&"tag"===r.content[0].type?"</"===r.content[0].content[0].content?n.length>0&&n[n.length-1].tagName===i(r.content[0].content[1])&&n.pop():"/>"===r.content[r.content.length-1].content||n.push({tagName:i(r.content[0].content[1]),openedBraces:0}):n.length>0&&"punctuation"===r.type&&"{"===r.content?n[n.length-1].openedBraces++:n.length>0&&n[n.length-1].openedBraces>0&&"punctuation"===r.type&&"}"===r.content?n[n.length-1].openedBraces--:o=!0),(o||"string"==typeof r)&&n.length>0&&0===n[n.length-1].openedBraces){var l=i(r);a<t.length-1&&("string"==typeof t[a+1]||"plain-text"===t[a+1].type)&&(l+=i(t[a+1]),t.splice(a+1,1)),a>0&&("string"==typeof t[a-1]||"plain-text"===t[a-1].type)&&(l=i(t[a-1])+l,t.splice(a-1,1),a--),t[a]=new e.Token("plain-text",l,null,l)}r.content&&"string"!=typeof 
r.content&&s(r.content)}};e.hooks.add("after-tokenize",(function(e){"jsx"!==e.language&&"tsx"!==e.language||s(e.tokens)}))}(r),function(e){e.languages.diff={coord:[/^(?:\*{3}|-{3}|\+{3}).*$/m,/^@@.*@@$/m,/^\d.*$/m]};var t={"deleted-sign":"-","deleted-arrow":"<","inserted-sign":"+","inserted-arrow":">",unchanged:" ",diff:"!"};Object.keys(t).forEach((function(n){var a=t[n],r=[];/^\w+$/.test(n)||r.push(/\w+/.exec(n)[0]),"diff"===n&&r.push("bold"),e.languages.diff[n]={pattern:RegExp("^(?:["+a+"].*(?:\r\n?|\n|(?![\\s\\S])))+","m"),alias:r,inside:{line:{pattern:/(.)(?=[\s\S]).*(?:\r\n?|\n)?/,lookbehind:!0},prefix:{pattern:/[\s\S]/,alias:/\w+/.exec(n)[0]}}}})),Object.defineProperty(e.languages.diff,"PREFIXES",{value:t})}(r),r.languages.git={comment:/^#.*/m,deleted:/^[-\u2013].*/m,inserted:/^\+.*/m,string:/("|')(?:\\.|(?!\1)[^\\\r\n])*\1/,command:{pattern:/^.*\$ git .*$/m,inside:{parameter:/\s--?\w+/}},coord:/^@@.*@@$/m,"commit-sha1":/^commit \w{40}$/m},r.languages.go=r.languages.extend("clike",{string:{pattern:/(^|[^\\])"(?:\\.|[^"\\\r\n])*"|`[^`]*`/,lookbehind:!0,greedy:!0},keyword:/\b(?:break|case|chan|const|continue|default|defer|else|fallthrough|for|func|go(?:to)?|if|import|interface|map|package|range|return|select|struct|switch|type|var)\b/,boolean:/\b(?:_|false|iota|nil|true)\b/,number:[/\b0(?:b[01_]+|o[0-7_]+)i?\b/i,/\b0x(?:[a-f\d_]+(?:\.[a-f\d_]*)?|\.[a-f\d_]+)(?:p[+-]?\d+(?:_\d+)*)?i?(?!\w)/i,/(?:\b\d[\d_]*(?:\.[\d_]*)?|\B\.\d[\d_]*)(?:e[+-]?[\d_]+)?i?(?!\w)/i],operator:/[*\/%^!=]=?|\+[=+]?|-[=-]?|\|[=|]?|&(?:=|&|\^=?)?|>(?:>=?|=)?|<(?:<=?|=|-)?|:=|\.\.\./,builtin:/\b(?:append|bool|byte|cap|close|complex|complex(?:64|128)|copy|delete|error|float(?:32|64)|u?int(?:8|16|32|64)?|imag|len|make|new|panic|print(?:ln)?|real|recover|rune|string|uintptr)\b/}),r.languages.insertBefore("go","string",{char:{pattern:/'(?:\\.|[^'\\\r\n]){0,10}'/,greedy:!0}}),delete r.languages.go["class-name"],function(e){function 
t(e,t){return"___"+e.toUpperCase()+t+"___"}Object.defineProperties(e.languages["markup-templating"]={},{buildPlaceholders:{value:function(n,a,r,o){if(n.language===a){var i=n.tokenStack=[];n.code=n.code.replace(r,(function(e){if("function"==typeof o&&!o(e))return e;for(var r,s=i.length;-1!==n.code.indexOf(r=t(a,s));)++s;return i[s]=e,r})),n.grammar=e.languages.markup}}},tokenizePlaceholders:{value:function(n,a){if(n.language===a&&n.tokenStack){n.grammar=e.languages[a];var r=0,o=Object.keys(n.tokenStack);!function i(s){for(var l=0;l<s.length&&!(r>=o.length);l++){var c=s[l];if("string"==typeof c||c.content&&"string"==typeof c.content){var u=o[r],d=n.tokenStack[u],f="string"==typeof c?c:c.content,p=t(a,u),m=f.indexOf(p);if(m>-1){++r;var h=f.substring(0,m),g=new e.Token(a,e.tokenize(d,n.grammar),"language-"+a,d),_=f.substring(m+p.length),b=[];h&&b.push.apply(b,i([h])),b.push(g),_&&b.push.apply(b,i([_])),"string"==typeof c?s.splice.apply(s,[l,1].concat(b)):c.content=b}}else c.content&&i(c.content)}return 
s}(n.tokens)}}}})}(r),function(e){e.languages.handlebars={comment:/\{\{![\s\S]*?\}\}/,delimiter:{pattern:/^\{\{\{?|\}\}\}?$/,alias:"punctuation"},string:/(["'])(?:\\.|(?!\1)[^\\\r\n])*\1/,number:/\b0x[\dA-Fa-f]+\b|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:[Ee][+-]?\d+)?/,boolean:/\b(?:false|true)\b/,block:{pattern:/^(\s*(?:~\s*)?)[#\/]\S+?(?=\s*(?:~\s*)?$|\s)/,lookbehind:!0,alias:"keyword"},brackets:{pattern:/\[[^\]]+\]/,inside:{punctuation:/\[|\]/,variable:/[\s\S]+/}},punctuation:/[!"#%&':()*+,.\/;<=>@\[\\\]^`{|}~]/,variable:/[^!"#%&'()*+,\/;<=>@\[\\\]^`{|}~\s]+/},e.hooks.add("before-tokenize",(function(t){e.languages["markup-templating"].buildPlaceholders(t,"handlebars",/\{\{\{[\s\S]+?\}\}\}|\{\{[\s\S]+?\}\}/g)})),e.hooks.add("after-tokenize",(function(t){e.languages["markup-templating"].tokenizePlaceholders(t,"handlebars")})),e.languages.hbs=e.languages.handlebars}(r),r.languages.json={property:{pattern:/(^|[^\\])"(?:\\.|[^\\"\r\n])*"(?=\s*:)/,lookbehind:!0,greedy:!0},string:{pattern:/(^|[^\\])"(?:\\.|[^\\"\r\n])*"(?!\s*:)/,lookbehind:!0,greedy:!0},comment:{pattern:/\/\/.*|\/\*[\s\S]*?(?:\*\/|$)/,greedy:!0},number:/-?\b\d+(?:\.\d+)?(?:e[+-]?\d+)?\b/i,punctuation:/[{}[\],]/,operator:/:/,boolean:/\b(?:false|true)\b/,null:{pattern:/\bnull\b/,alias:"keyword"}},r.languages.webmanifest=r.languages.json,r.languages.less=r.languages.extend("css",{comment:[/\/\*[\s\S]*?\*\//,{pattern:/(^|[^\\])\/\/.*/,lookbehind:!0}],atrule:{pattern:/@[\w-](?:\((?:[^(){}]|\([^(){}]*\))*\)|[^(){};\s]|\s+(?!\s))*?(?=\s*\{)/,inside:{punctuation:/[:()]/}},selector:{pattern:/(?:@\{[\w-]+\}|[^{};\s@])(?:@\{[\w-]+\}|\((?:[^(){}]|\([^(){}]*\))*\)|[^(){};@\s]|\s+(?!\s))*?(?=\s*\{)/,inside:{variable:/@+[\w-]+/}},property:/(?:@\{[\w-]+\}|[\w-])+(?:\+_?)?(?=\s*:)/,operator:/[+\-*\/]/}),r.languages.insertBefore("less","property",{variable:[{pattern:/@[\w-]+\s*:/,inside:{punctuation:/:/}},/@@?[\w-]+/],"mixin-usage":{pattern:/([{;]\s*)[.#](?!\d)[\w-].*?(?=[(;])/,lookbehind:!0,alias:"function"}}),r.languages.makefil
e={comment:{pattern:/(^|[^\\])#(?:\\(?:\r\n|[\s\S])|[^\\\r\n])*/,lookbehind:!0},string:{pattern:/(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0},"builtin-target":{pattern:/\.[A-Z][^:#=\s]+(?=\s*:(?!=))/,alias:"builtin"},target:{pattern:/^(?:[^:=\s]|[ \t]+(?![\s:]))+(?=\s*:(?!=))/m,alias:"symbol",inside:{variable:/\$+(?:(?!\$)[^(){}:#=\s]+|(?=[({]))/}},variable:/\$+(?:(?!\$)[^(){}:#=\s]+|\([@*%<^+?][DF]\)|(?=[({]))/,keyword:/-include\b|\b(?:define|else|endef|endif|export|ifn?def|ifn?eq|include|override|private|sinclude|undefine|unexport|vpath)\b/,function:{pattern:/(\()(?:abspath|addsuffix|and|basename|call|dir|error|eval|file|filter(?:-out)?|findstring|firstword|flavor|foreach|guile|if|info|join|lastword|load|notdir|or|origin|patsubst|realpath|shell|sort|strip|subst|suffix|value|warning|wildcard|word(?:list|s)?)(?=[ \t])/,lookbehind:!0},operator:/(?:::|[?:+!])?=|[|@]/,punctuation:/[:;(){}]/},r.languages.objectivec=r.languages.extend("c",{string:{pattern:/@?"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"/,greedy:!0},keyword:/\b(?:asm|auto|break|case|char|const|continue|default|do|double|else|enum|extern|float|for|goto|if|in|inline|int|long|register|return|self|short|signed|sizeof|static|struct|super|switch|typedef|typeof|union|unsigned|void|volatile|while)\b|(?:@interface|@end|@implementation|@protocol|@class|@public|@protected|@private|@property|@try|@catch|@finally|@throw|@synthesize|@dynamic|@selector)\b/,operator:/-[->]?|\+\+?|!=?|<<?=?|>>?=?|==?|&&?|\|\|?|[~^%?*\/@]/}),delete 
r.languages.objectivec["class-name"],r.languages.objc=r.languages.objectivec,r.languages.ocaml={comment:{pattern:/\(\*[\s\S]*?\*\)/,greedy:!0},char:{pattern:/'(?:[^\\\r\n']|\\(?:.|[ox]?[0-9a-f]{1,3}))'/i,greedy:!0},string:[{pattern:/"(?:\\(?:[\s\S]|\r\n)|[^\\\r\n"])*"/,greedy:!0},{pattern:/\{([a-z_]*)\|[\s\S]*?\|\1\}/,greedy:!0}],number:[/\b(?:0b[01][01_]*|0o[0-7][0-7_]*)\b/i,/\b0x[a-f0-9][a-f0-9_]*(?:\.[a-f0-9_]*)?(?:p[+-]?\d[\d_]*)?(?!\w)/i,/\b\d[\d_]*(?:\.[\d_]*)?(?:e[+-]?\d[\d_]*)?(?!\w)/i],directive:{pattern:/\B#\w+/,alias:"property"},label:{pattern:/\B~\w+/,alias:"property"},"type-variable":{pattern:/\B'\w+/,alias:"function"},variant:{pattern:/`\w+/,alias:"symbol"},keyword:/\b(?:as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|for|fun|function|functor|if|in|include|inherit|initializer|lazy|let|match|method|module|mutable|new|nonrec|object|of|open|private|rec|sig|struct|then|to|try|type|val|value|virtual|when|where|while|with)\b/,boolean:/\b(?:false|true)\b/,"operator-like-punctuation":{pattern:/\[[<>|]|[>|]\]|\{<|>\}/,alias:"punctuation"},operator:/\.[.~]|:[=>]|[=<>@^|&+\-*\/$%!?~][!$%&*+\-.\/:<=>?@^|~]*|\b(?:and|asr|land|lor|lsl|lsr|lxor|mod|or)\b/,punctuation:/;;|::|[(){}\[\].,:;#]|\b_\b/},r.languages.python={comment:{pattern:/(^|[^\\])#.*/,lookbehind:!0,greedy:!0},"string-interpolation":{pattern:/(?:f|fr|rf)(?:("""|''')[\s\S]*?\1|("|')(?:\\.|(?!\2)[^\\\r\n])*\2)/i,greedy:!0,inside:{interpolation:{pattern:/((?:^|[^{])(?:\{\{)*)\{(?!\{)(?:[^{}]|\{(?!\{)(?:[^{}]|\{(?!\{)(?:[^{}])+\})+\})+\}/,lookbehind:!0,inside:{"format-spec":{pattern:/(:)[^:(){}]+(?=\}$)/,lookbehind:!0},"conversion-option":{pattern:/![sra](?=[:}]$)/,alias:"punctuation"},rest:null}},string:/[\s\S]+/}},"triple-quoted-string":{pattern:/(?:[rub]|br|rb)?("""|''')[\s\S]*?\1/i,greedy:!0,alias:"string"},string:{pattern:/(?:[rub]|br|rb)?("|')(?:\\.|(?!\1)[^\\\r\n])*\1/i,greedy:!0},function:{pattern:/((?:^|\s)def[ 
\t]+)[a-zA-Z_]\w*(?=\s*\()/g,lookbehind:!0},"class-name":{pattern:/(\bclass\s+)\w+/i,lookbehind:!0},decorator:{pattern:/(^[\t ]*)@\w+(?:\.\w+)*/m,lookbehind:!0,alias:["annotation","punctuation"],inside:{punctuation:/\./}},keyword:/\b(?:_(?=\s*:)|and|as|assert|async|await|break|case|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|match|nonlocal|not|or|pass|print|raise|return|try|while|with|yield)\b/,builtin:/\b(?:__import__|abs|all|any|apply|ascii|basestring|bin|bool|buffer|bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|complex|delattr|dict|dir|divmod|enumerate|eval|execfile|file|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|intern|isinstance|issubclass|iter|len|list|locals|long|map|max|memoryview|min|next|object|oct|open|ord|pow|property|range|raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|vars|xrange|zip)\b/,boolean:/\b(?:False|None|True)\b/,number:/\b0(?:b(?:_?[01])+|o(?:_?[0-7])+|x(?:_?[a-f0-9])+)\b|(?:\b\d+(?:_\d+)*(?:\.(?:\d+(?:_\d+)*)?)?|\B\.\d+(?:_\d+)*)(?:e[+-]?\d+(?:_\d+)*)?j?(?!\w)/i,operator:/[-+%=]=?|!=|:=|\*\*?=?|\/\/?=?|<[<=>]?|>[=>]?|[&|^~]/,punctuation:/[{}[\];(),.:]/},r.languages.python["string-interpolation"].inside.interpolation.inside.rest=r.languages.python,r.languages.py=r.languages.python,r.languages.reason=r.languages.extend("clike",{string:{pattern:/"(?:\\(?:\r\n|[\s\S])|[^\\\r\n"])*"/,greedy:!0},"class-name":/\b[A-Z]\w*/,keyword:/\b(?:and|as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|for|fun|function|functor|if|in|include|inherit|initializer|lazy|let|method|module|mutable|new|nonrec|object|of|open|or|private|rec|sig|struct|switch|then|to|try|type|val|virtual|when|while|with)\b/,operator:/\.{3}|:[:=]|\|>|->|=(?:==?|>)?|<=?|>=?|[|^?'#!~`]|[+\-*\/]\.?|\b(?:asr|land|lor|lsl|lsr|lxor|mod)\b/}),r.languages.insertBefore("reason","class-name",{char:{patte
rn:/'(?:\\x[\da-f]{2}|\\o[0-3][0-7][0-7]|\\\d{3}|\\.|[^'\\\r\n])'/,greedy:!0},constructor:/\b[A-Z]\w*\b(?!\s*\.)/,label:{pattern:/\b[a-z]\w*(?=::)/,alias:"symbol"}}),delete r.languages.reason.function,function(e){e.languages.sass=e.languages.extend("css",{comment:{pattern:/^([ \t]*)\/[\/*].*(?:(?:\r?\n|\r)\1[ \t].+)*/m,lookbehind:!0,greedy:!0}}),e.languages.insertBefore("sass","atrule",{"atrule-line":{pattern:/^(?:[ \t]*)[@+=].+/m,greedy:!0,inside:{atrule:/(?:@[\w-]+|[+=])/}}}),delete e.languages.sass.atrule;var t=/\$[-\w]+|#\{\$[-\w]+\}/,n=[/[+*\/%]|[=!]=|<=?|>=?|\b(?:and|not|or)\b/,{pattern:/(\s)-(?=\s)/,lookbehind:!0}];e.languages.insertBefore("sass","property",{"variable-line":{pattern:/^[ \t]*\$.+/m,greedy:!0,inside:{punctuation:/:/,variable:t,operator:n}},"property-line":{pattern:/^[ \t]*(?:[^:\s]+ *:.*|:[^:\s].*)/m,greedy:!0,inside:{property:[/[^:\s]+(?=\s*:)/,{pattern:/(:)[^:\s]+/,lookbehind:!0}],punctuation:/:/,variable:t,operator:n,important:e.languages.sass.important}}}),delete e.languages.sass.property,delete e.languages.sass.important,e.languages.insertBefore("sass","punctuation",{selector:{pattern:/^([ \t]*)\S(?:,[^,\r\n]+|[^,\r\n]*)(?:,[^,\r\n]+)*(?:,(?:\r?\n|\r)\1[ \t]+\S(?:,[^,\r\n]+|[^,\r\n]*)(?:,[^,\r\n]+)*)*/m,lookbehind:!0,greedy:!0}})}(r),r.languages.scss=r.languages.extend("css",{comment:{pattern:/(^|[^\\])(?:\/\*[\s\S]*?\*\/|\/\/.*)/,lookbehind:!0},atrule:{pattern:/@[\w-](?:\([^()]+\)|[^()\s]|\s+(?!\s))*?(?=\s+[{;])/,inside:{rule:/@[\w-]+/}},url:/(?:[-a-z]+-)?url(?=\()/i,selector:{pattern:/(?=\S)[^@;{}()]?(?:[^@;{}()\s]|\s+(?!\s)|#\{\$[-\w]+\})+(?=\s*\{(?:\}|\s|[^}][^:{}]*[:{][^}]))/,inside:{parent:{pattern:/&/,alias:"important"},placeholder:/%[-\w]+/,variable:/\$[-\w]+|#\{\$[-\w]+\}/}},property:{pattern:/(?:[-\w]|\$[-\w]|#\{\$[-\w]+\})+(?=\s*:)/,inside:{variable:/\$[-\w]+|#\{\$[-\w]+\}/}}}),r.languages.insertBefore("scss","atrule",{keyword:[/@(?:content|debug|each|else(?: 
if)?|extend|for|forward|function|if|import|include|mixin|return|use|warn|while)\b/i,{pattern:/( )(?:from|through)(?= )/,lookbehind:!0}]}),r.languages.insertBefore("scss","important",{variable:/\$[-\w]+|#\{\$[-\w]+\}/}),r.languages.insertBefore("scss","function",{"module-modifier":{pattern:/\b(?:as|hide|show|with)\b/i,alias:"keyword"},placeholder:{pattern:/%[-\w]+/,alias:"selector"},statement:{pattern:/\B!(?:default|optional)\b/i,alias:"keyword"},boolean:/\b(?:false|true)\b/,null:{pattern:/\bnull\b/,alias:"keyword"},operator:{pattern:/(\s)(?:[-+*\/%]|[=!]=|<=?|>=?|and|not|or)(?=\s)/,lookbehind:!0}}),r.languages.scss.atrule.inside.rest=r.languages.scss,function(e){var t={pattern:/(\b\d+)(?:%|[a-z]+)/,lookbehind:!0},n={pattern:/(^|[^\w.-])-?(?:\d+(?:\.\d+)?|\.\d+)/,lookbehind:!0},a={comment:{pattern:/(^|[^\\])(?:\/\*[\s\S]*?\*\/|\/\/.*)/,lookbehind:!0},url:{pattern:/\burl\((["']?).*?\1\)/i,greedy:!0},string:{pattern:/("|')(?:(?!\1)[^\\\r\n]|\\(?:\r\n|[\s\S]))*\1/,greedy:!0},interpolation:null,func:null,important:/\B!(?:important|optional)\b/i,keyword:{pattern:/(^|\s+)(?:(?:else|for|if|return|unless)(?=\s|$)|@[\w-]+)/,lookbehind:!0},hexcode:/#[\da-f]{3,6}/i,color:[/\b(?:AliceBlue|AntiqueWhite|Aqua|Aquamarine|Azure|Beige|Bisque|Black|BlanchedAlmond|Blue|BlueViolet|Brown|BurlyWood|CadetBlue|Chartreuse|Chocolate|Coral|CornflowerBlue|Cornsilk|Crimson|Cyan|DarkBlue|DarkCyan|DarkGoldenRod|DarkGr[ae]y|DarkGreen|DarkKhaki|DarkMagenta|DarkOliveGreen|DarkOrange|DarkOrchid|DarkRed|DarkSalmon|DarkSeaGreen|DarkSlateBlue|DarkSlateGr[ae]y|DarkTurquoise|DarkViolet|DeepPink|DeepSkyBlue|DimGr[ae]y|DodgerBlue|FireBrick|FloralWhite|ForestGreen|Fuchsia|Gainsboro|GhostWhite|Gold|GoldenRod|Gr[ae]y|Green|GreenYellow|HoneyDew|HotPink|IndianRed|Indigo|Ivory|Khaki|Lavender|LavenderBlush|LawnGreen|LemonChiffon|LightBlue|LightCoral|LightCyan|LightGoldenRodYellow|LightGr[ae]y|LightGreen|LightPink|LightSalmon|LightSeaGreen|LightSkyBlue|LightSlateGr[ae]y|LightSteelBlue|LightYellow|Lime|LimeGreen|Linen
|Magenta|Maroon|MediumAquaMarine|MediumBlue|MediumOrchid|MediumPurple|MediumSeaGreen|MediumSlateBlue|MediumSpringGreen|MediumTurquoise|MediumVioletRed|MidnightBlue|MintCream|MistyRose|Moccasin|NavajoWhite|Navy|OldLace|Olive|OliveDrab|Orange|OrangeRed|Orchid|PaleGoldenRod|PaleGreen|PaleTurquoise|PaleVioletRed|PapayaWhip|PeachPuff|Peru|Pink|Plum|PowderBlue|Purple|Red|RosyBrown|RoyalBlue|SaddleBrown|Salmon|SandyBrown|SeaGreen|SeaShell|Sienna|Silver|SkyBlue|SlateBlue|SlateGr[ae]y|Snow|SpringGreen|SteelBlue|Tan|Teal|Thistle|Tomato|Transparent|Turquoise|Violet|Wheat|White|WhiteSmoke|Yellow|YellowGreen)\b/i,{pattern:/\b(?:hsl|rgb)\(\s*\d{1,3}\s*,\s*\d{1,3}%?\s*,\s*\d{1,3}%?\s*\)\B|\b(?:hsl|rgb)a\(\s*\d{1,3}\s*,\s*\d{1,3}%?\s*,\s*\d{1,3}%?\s*,\s*(?:0|0?\.\d+|1)\s*\)\B/i,inside:{unit:t,number:n,function:/[\w-]+(?=\()/,punctuation:/[(),]/}}],entity:/\\[\da-f]{1,8}/i,unit:t,boolean:/\b(?:false|true)\b/,operator:[/~|[+!\/%<>?=]=?|[-:]=|\*[*=]?|\.{2,3}|&&|\|\||\B-\B|\b(?:and|in|is(?: a| defined| not|nt)?|not|or)\b/],number:n,punctuation:/[{}()\[\];:,]/};a.interpolation={pattern:/\{[^\r\n}:]+\}/,alias:"variable",inside:{delimiter:{pattern:/^\{|\}$/,alias:"punctuation"},rest:a}},a.func={pattern:/[\w-]+\([^)]*\).*/,inside:{function:/^[^(]+/,rest:a}},e.languages.stylus={"atrule-declaration":{pattern:/(^[ \t]*)@.+/m,lookbehind:!0,inside:{atrule:/^@[\w-]+/,rest:a}},"variable-declaration":{pattern:/(^[ \t]*)[\w$-]+\s*.?=[ \t]*(?:\{[^{}]*\}|\S.*|$)/m,lookbehind:!0,inside:{variable:/^\S+/,rest:a}},statement:{pattern:/(^[ \t]*)(?:else|for|if|return|unless)[ \t].+/m,lookbehind:!0,inside:{keyword:/^\S+/,rest:a}},"property-declaration":{pattern:/((?:^|\{)([ \t]*))(?:[\w-]|\{[^}\r\n]+\})+(?:\s*:\s*|[ \t]+)(?!\s)[^{\r\n]*(?:;|[^{\r\n,]$(?!(?:\r?\n|\r)(?:\{|\2[ \t])))/m,lookbehind:!0,inside:{property:{pattern:/^[^\s:]+/,inside:{interpolation:a.interpolation}},rest:a}},selector:{pattern:/(^[ 
\t]*)(?:(?=\S)(?:[^{}\r\n:()]|::?[\w-]+(?:\([^)\r\n]*\)|(?![\w-]))|\{[^}\r\n]+\})+)(?:(?:\r?\n|\r)(?:\1(?:(?=\S)(?:[^{}\r\n:()]|::?[\w-]+(?:\([^)\r\n]*\)|(?![\w-]))|\{[^}\r\n]+\})+)))*(?:,$|\{|(?=(?:\r?\n|\r)(?:\{|\1[ \t])))/m,lookbehind:!0,inside:{interpolation:a.interpolation,comment:a.comment,punctuation:/[{},]/}},func:a.func,string:a.string,comment:{pattern:/(^|[^\\])(?:\/\*[\s\S]*?\*\/|\/\/.*)/,lookbehind:!0,greedy:!0},interpolation:a.interpolation,punctuation:/[{}()\[\];:.]/}}(r),function(e){var t=e.util.clone(e.languages.typescript);e.languages.tsx=e.languages.extend("jsx",t),delete e.languages.tsx.parameter,delete e.languages.tsx["literal-property"];var n=e.languages.tsx.tag;n.pattern=RegExp(/(^|[^\w$]|(?=<\/))/.source+"(?:"+n.pattern.source+")",n.pattern.flags),n.lookbehind=!0}(r),r.languages.wasm={comment:[/\(;[\s\S]*?;\)/,{pattern:/;;.*/,greedy:!0}],string:{pattern:/"(?:\\[\s\S]|[^"\\])*"/,greedy:!0},keyword:[{pattern:/\b(?:align|offset)=/,inside:{operator:/=/}},{pattern:/\b(?:(?:f32|f64|i32|i64)(?:\.(?:abs|add|and|ceil|clz|const|convert_[su]\/i(?:32|64)|copysign|ctz|demote\/f64|div(?:_[su])?|eqz?|extend_[su]\/i32|floor|ge(?:_[su])?|gt(?:_[su])?|le(?:_[su])?|load(?:(?:8|16|32)_[su])?|lt(?:_[su])?|max|min|mul|neg?|nearest|or|popcnt|promote\/f32|reinterpret\/[fi](?:32|64)|rem_[su]|rot[lr]|shl|shr_[su]|sqrt|store(?:8|16|32)?|sub|trunc(?:_[su]\/f(?:32|64))?|wrap\/i64|xor))?|memory\.(?:grow|size))\b/,inside:{punctuation:/\./}},/\b(?:anyfunc|block|br(?:_if|_table)?|call(?:_indirect)?|data|drop|elem|else|end|export|func|get_(?:global|local)|global|if|import|local|loop|memory|module|mut|nop|offset|param|result|return|select|set_(?:global|local)|start|table|tee_local|then|type|unreachable)\b/],variable:/\$[\w!#$%&'*+\-./:<=>?@\\^`|~]+/,number:/[+-]?\b(?:\d(?:_?\d)*(?:\.\d(?:_?\d)*)?(?:[eE][+-]?\d(?:_?\d)*)?|0x[\da-fA-F](?:_?[\da-fA-F])*(?:\.[\da-fA-F](?:_?[\da-fA-D])*)?(?:[pP][+-]?\d(?:_?\d)*)?)\b|\binf\b|\bnan(?::0x[\da-fA-F](?:_?[\da-fA-D])*)?\b/,punctuation:/[(
)]/};const o=r},9901:e=>{e.exports&&(e.exports={core:{meta:{path:"components/prism-core.js",option:"mandatory"},core:"Core"},themes:{meta:{path:"themes/{id}.css",link:"index.html?theme={id}",exclusive:!0},prism:{title:"Default",option:"default"},"prism-dark":"Dark","prism-funky":"Funky","prism-okaidia":{title:"Okaidia",owner:"ocodia"},"prism-twilight":{title:"Twilight",owner:"remybach"},"prism-coy":{title:"Coy",owner:"tshedor"},"prism-solarizedlight":{title:"Solarized Light",owner:"hectormatos2011 "},"prism-tomorrow":{title:"Tomorrow Night",owner:"Rosey"}},languages:{meta:{path:"components/prism-{id}",noCSS:!0,examplesPath:"examples/prism-{id}",addCheckAll:!0},markup:{title:"Markup",alias:["html","xml","svg","mathml","ssml","atom","rss"],aliasTitles:{html:"HTML",xml:"XML",svg:"SVG",mathml:"MathML",ssml:"SSML",atom:"Atom",rss:"RSS"},option:"default"},css:{title:"CSS",option:"default",modify:"markup"},clike:{title:"C-like",option:"default"},javascript:{title:"JavaScript",require:"clike",modify:"markup",optional:"regex",alias:"js",option:"default"},abap:{title:"ABAP",owner:"dellagustin"},abnf:{title:"ABNF",owner:"RunDevelopment"},actionscript:{title:"ActionScript",require:"javascript",modify:"markup",owner:"Golmote"},ada:{title:"Ada",owner:"Lucretia"},agda:{title:"Agda",owner:"xy-ren"},al:{title:"AL",owner:"RunDevelopment"},antlr4:{title:"ANTLR4",alias:"g4",owner:"RunDevelopment"},apacheconf:{title:"Apache Configuration",owner:"GuiTeK"},apex:{title:"Apex",require:["clike","sql"],owner:"RunDevelopment"},apl:{title:"APL",owner:"ngn"},applescript:{title:"AppleScript",owner:"Golmote"},aql:{title:"AQL",owner:"RunDevelopment"},arduino:{title:"Arduino",require:"cpp",alias:"ino",owner:"dkern"},arff:{title:"ARFF",owner:"Golmote"},armasm:{title:"ARM 
Assembly",alias:"arm-asm",owner:"RunDevelopment"},arturo:{title:"Arturo",alias:"art",optional:["bash","css","javascript","markup","markdown","sql"],owner:"drkameleon"},asciidoc:{alias:"adoc",title:"AsciiDoc",owner:"Golmote"},aspnet:{title:"ASP.NET (C#)",require:["markup","csharp"],owner:"nauzilus"},asm6502:{title:"6502 Assembly",owner:"kzurawel"},asmatmel:{title:"Atmel AVR Assembly",owner:"cerkit"},autohotkey:{title:"AutoHotkey",owner:"aviaryan"},autoit:{title:"AutoIt",owner:"Golmote"},avisynth:{title:"AviSynth",alias:"avs",owner:"Zinfidel"},"avro-idl":{title:"Avro IDL",alias:"avdl",owner:"RunDevelopment"},awk:{title:"AWK",alias:"gawk",aliasTitles:{gawk:"GAWK"},owner:"RunDevelopment"},bash:{title:"Bash",alias:["sh","shell"],aliasTitles:{sh:"Shell",shell:"Shell"},owner:"zeitgeist87"},basic:{title:"BASIC",owner:"Golmote"},batch:{title:"Batch",owner:"Golmote"},bbcode:{title:"BBcode",alias:"shortcode",aliasTitles:{shortcode:"Shortcode"},owner:"RunDevelopment"},bbj:{title:"BBj",owner:"hyyan"},bicep:{title:"Bicep",owner:"johnnyreilly"},birb:{title:"Birb",require:"clike",owner:"Calamity210"},bison:{title:"Bison",require:"c",owner:"Golmote"},bnf:{title:"BNF",alias:"rbnf",aliasTitles:{rbnf:"RBNF"},owner:"RunDevelopment"},bqn:{title:"BQN",owner:"yewscion"},brainfuck:{title:"Brainfuck",owner:"Golmote"},brightscript:{title:"BrightScript",owner:"RunDevelopment"},bro:{title:"Bro",owner:"wayward710"},bsl:{title:"BSL 
(1C:Enterprise)",alias:"oscript",aliasTitles:{oscript:"OneScript"},owner:"Diversus23"},c:{title:"C",require:"clike",owner:"zeitgeist87"},csharp:{title:"C#",require:"clike",alias:["cs","dotnet"],owner:"mvalipour"},cpp:{title:"C++",require:"c",owner:"zeitgeist87"},cfscript:{title:"CFScript",require:"clike",alias:"cfc",owner:"mjclemente"},chaiscript:{title:"ChaiScript",require:["clike","cpp"],owner:"RunDevelopment"},cil:{title:"CIL",owner:"sbrl"},cilkc:{title:"Cilk/C",require:"c",alias:"cilk-c",owner:"OpenCilk"},cilkcpp:{title:"Cilk/C++",require:"cpp",alias:["cilk-cpp","cilk"],owner:"OpenCilk"},clojure:{title:"Clojure",owner:"troglotit"},cmake:{title:"CMake",owner:"mjrogozinski"},cobol:{title:"COBOL",owner:"RunDevelopment"},coffeescript:{title:"CoffeeScript",require:"javascript",alias:"coffee",owner:"R-osey"},concurnas:{title:"Concurnas",alias:"conc",owner:"jasontatton"},csp:{title:"Content-Security-Policy",owner:"ScottHelme"},cooklang:{title:"Cooklang",owner:"ahue"},coq:{title:"Coq",owner:"RunDevelopment"},crystal:{title:"Crystal",require:"ruby",owner:"MakeNowJust"},"css-extras":{title:"CSS Extras",require:"css",modify:"css",owner:"milesj"},csv:{title:"CSV",owner:"RunDevelopment"},cue:{title:"CUE",owner:"RunDevelopment"},cypher:{title:"Cypher",owner:"RunDevelopment"},d:{title:"D",require:"clike",owner:"Golmote"},dart:{title:"Dart",require:"clike",owner:"Golmote"},dataweave:{title:"DataWeave",owner:"machaval"},dax:{title:"DAX",owner:"peterbud"},dhall:{title:"Dhall",owner:"RunDevelopment"},diff:{title:"Diff",owner:"uranusjr"},django:{title:"Django/Jinja2",require:"markup-templating",alias:"jinja2",owner:"romanvm"},"dns-zone-file":{title:"DNS zone file",owner:"RunDevelopment",alias:"dns-zone"},docker:{title:"Docker",alias:"dockerfile",owner:"JustinBeckwith"},dot:{title:"DOT 
(Graphviz)",alias:"gv",optional:"markup",owner:"RunDevelopment"},ebnf:{title:"EBNF",owner:"RunDevelopment"},editorconfig:{title:"EditorConfig",owner:"osipxd"},eiffel:{title:"Eiffel",owner:"Conaclos"},ejs:{title:"EJS",require:["javascript","markup-templating"],owner:"RunDevelopment",alias:"eta",aliasTitles:{eta:"Eta"}},elixir:{title:"Elixir",owner:"Golmote"},elm:{title:"Elm",owner:"zwilias"},etlua:{title:"Embedded Lua templating",require:["lua","markup-templating"],owner:"RunDevelopment"},erb:{title:"ERB",require:["ruby","markup-templating"],owner:"Golmote"},erlang:{title:"Erlang",owner:"Golmote"},"excel-formula":{title:"Excel Formula",alias:["xlsx","xls"],owner:"RunDevelopment"},fsharp:{title:"F#",require:"clike",owner:"simonreynolds7"},factor:{title:"Factor",owner:"catb0t"},false:{title:"False",owner:"edukisto"},"firestore-security-rules":{title:"Firestore security rules",require:"clike",owner:"RunDevelopment"},flow:{title:"Flow",require:"javascript",owner:"Golmote"},fortran:{title:"Fortran",owner:"Golmote"},ftl:{title:"FreeMarker Template Language",require:"markup-templating",owner:"RunDevelopment"},gml:{title:"GameMaker Language",alias:"gamemakerlanguage",require:"clike",owner:"LiarOnce"},gap:{title:"GAP (CAS)",owner:"RunDevelopment"},gcode:{title:"G-code",owner:"RunDevelopment"},gdscript:{title:"GDScript",owner:"RunDevelopment"},gedcom:{title:"GEDCOM",owner:"Golmote"},gettext:{title:"gettext",alias:"po",owner:"RunDevelopment"},gherkin:{title:"Gherkin",owner:"hason"},git:{title:"Git",owner:"lgiraudel"},glsl:{title:"GLSL",require:"c",owner:"Golmote"},gn:{title:"GN",alias:"gni",owner:"RunDevelopment"},"linker-script":{title:"GNU Linker Script",alias:"ld",owner:"RunDevelopment"},go:{title:"Go",require:"clike",owner:"arnehormann"},"go-module":{title:"Go 
module",alias:"go-mod",owner:"RunDevelopment"},gradle:{title:"Gradle",require:"clike",owner:"zeabdelkhalek-badido18"},graphql:{title:"GraphQL",optional:"markdown",owner:"Golmote"},groovy:{title:"Groovy",require:"clike",owner:"robfletcher"},haml:{title:"Haml",require:"ruby",optional:["css","css-extras","coffeescript","erb","javascript","less","markdown","scss","textile"],owner:"Golmote"},handlebars:{title:"Handlebars",require:"markup-templating",alias:["hbs","mustache"],aliasTitles:{mustache:"Mustache"},owner:"Golmote"},haskell:{title:"Haskell",alias:"hs",owner:"bholst"},haxe:{title:"Haxe",require:"clike",optional:"regex",owner:"Golmote"},hcl:{title:"HCL",owner:"outsideris"},hlsl:{title:"HLSL",require:"c",owner:"RunDevelopment"},hoon:{title:"Hoon",owner:"matildepark"},http:{title:"HTTP",optional:["csp","css","hpkp","hsts","javascript","json","markup","uri"],owner:"danielgtaylor"},hpkp:{title:"HTTP Public-Key-Pins",owner:"ScottHelme"},hsts:{title:"HTTP Strict-Transport-Security",owner:"ScottHelme"},ichigojam:{title:"IchigoJam",owner:"BlueCocoa"},icon:{title:"Icon",owner:"Golmote"},"icu-message-format":{title:"ICU Message Format",owner:"RunDevelopment"},idris:{title:"Idris",alias:"idr",owner:"KeenS",require:"haskell"},ignore:{title:".ignore",owner:"osipxd",alias:["gitignore","hgignore","npmignore"],aliasTitles:{gitignore:".gitignore",hgignore:".hgignore",npmignore:".npmignore"}},inform7:{title:"Inform 7",owner:"Golmote"},ini:{title:"Ini",owner:"aviaryan"},io:{title:"Io",owner:"AlesTsurko"},j:{title:"J",owner:"Golmote"},java:{title:"Java",require:"clike",owner:"sherblot"},javadoc:{title:"JavaDoc",require:["markup","java","javadoclike"],modify:"java",optional:"scala",owner:"RunDevelopment"},javadoclike:{title:"JavaDoc-like",modify:["java","javascript","php"],owner:"RunDevelopment"},javastacktrace:{title:"Java stack 
trace",owner:"RunDevelopment"},jexl:{title:"Jexl",owner:"czosel"},jolie:{title:"Jolie",require:"clike",owner:"thesave"},jq:{title:"JQ",owner:"RunDevelopment"},jsdoc:{title:"JSDoc",require:["javascript","javadoclike","typescript"],modify:"javascript",optional:["actionscript","coffeescript"],owner:"RunDevelopment"},"js-extras":{title:"JS Extras",require:"javascript",modify:"javascript",optional:["actionscript","coffeescript","flow","n4js","typescript"],owner:"RunDevelopment"},json:{title:"JSON",alias:"webmanifest",aliasTitles:{webmanifest:"Web App Manifest"},owner:"CupOfTea696"},json5:{title:"JSON5",require:"json",owner:"RunDevelopment"},jsonp:{title:"JSONP",require:"json",owner:"RunDevelopment"},jsstacktrace:{title:"JS stack trace",owner:"sbrl"},"js-templates":{title:"JS Templates",require:"javascript",modify:"javascript",optional:["css","css-extras","graphql","markdown","markup","sql"],owner:"RunDevelopment"},julia:{title:"Julia",owner:"cdagnino"},keepalived:{title:"Keepalived Configure",owner:"dev-itsheng"},keyman:{title:"Keyman",owner:"mcdurdin"},kotlin:{title:"Kotlin",alias:["kt","kts"],aliasTitles:{kts:"Kotlin Script"},require:"clike",owner:"Golmote"},kumir:{title:"KuMir (\u041a\u0443\u041c\u0438\u0440)",alias:"kum",owner:"edukisto"},kusto:{title:"Kusto",owner:"RunDevelopment"},latex:{title:"LaTeX",alias:["tex","context"],aliasTitles:{tex:"TeX",context:"ConTeXt"},owner:"japborst"},latte:{title:"Latte",require:["clike","markup-templating","php"],owner:"nette"},less:{title:"Less",require:"css",optional:"css-extras",owner:"Golmote"},lilypond:{title:"LilyPond",require:"scheme",alias:"ly",owner:"RunDevelopment"},liquid:{title:"Liquid",require:"markup-templating",owner:"cinhtau"},lisp:{title:"Lisp",alias:["emacs","elisp","emacs-lisp"],owner:"JuanCaicedo"},livescript:{title:"LiveScript",owner:"Golmote"},llvm:{title:"LLVM IR",owner:"porglezomp"},log:{title:"Log 
file",optional:"javastacktrace",owner:"RunDevelopment"},lolcode:{title:"LOLCODE",owner:"Golmote"},lua:{title:"Lua",owner:"Golmote"},magma:{title:"Magma (CAS)",owner:"RunDevelopment"},makefile:{title:"Makefile",owner:"Golmote"},markdown:{title:"Markdown",require:"markup",optional:"yaml",alias:"md",owner:"Golmote"},"markup-templating":{title:"Markup templating",require:"markup",owner:"Golmote"},mata:{title:"Mata",owner:"RunDevelopment"},matlab:{title:"MATLAB",owner:"Golmote"},maxscript:{title:"MAXScript",owner:"RunDevelopment"},mel:{title:"MEL",owner:"Golmote"},mermaid:{title:"Mermaid",owner:"RunDevelopment"},metafont:{title:"METAFONT",owner:"LaeriExNihilo"},mizar:{title:"Mizar",owner:"Golmote"},mongodb:{title:"MongoDB",owner:"airs0urce",require:"javascript"},monkey:{title:"Monkey",owner:"Golmote"},moonscript:{title:"MoonScript",alias:"moon",owner:"RunDevelopment"},n1ql:{title:"N1QL",owner:"TMWilds"},n4js:{title:"N4JS",require:"javascript",optional:"jsdoc",alias:"n4jsd",owner:"bsmith-n4"},"nand2tetris-hdl":{title:"Nand To Tetris HDL",owner:"stephanmax"},naniscript:{title:"Naninovel Script",owner:"Elringus",alias:"nani"},nasm:{title:"NASM",owner:"rbmj"},neon:{title:"NEON",owner:"nette"},nevod:{title:"Nevod",owner:"nezaboodka"},nginx:{title:"nginx",owner:"volado"},nim:{title:"Nim",owner:"Golmote"},nix:{title:"Nix",owner:"Golmote"},nsis:{title:"NSIS",owner:"idleberg"},objectivec:{title:"Objective-C",require:"c",alias:"objc",owner:"uranusjr"},ocaml:{title:"OCaml",owner:"Golmote"},odin:{title:"Odin",owner:"edukisto"},opencl:{title:"OpenCL",require:"c",modify:["c","cpp"],owner:"Milania1"},openqasm:{title:"OpenQasm",alias:"qasm",owner:"RunDevelopment"},oz:{title:"Oz",owner:"Golmote"},parigp:{title:"PARI/GP",owner:"Golmote"},parser:{title:"Parser",require:"markup",owner:"Golmote"},pascal:{title:"Pascal",alias:"objectpascal",aliasTitles:{objectpascal:"Object Pascal"},owner:"Golmote"},pascaligo:{title:"Pascaligo",owner:"DefinitelyNotAGoat"},psl:{title:"PATROL Scripting 
Language",owner:"bertysentry"},pcaxis:{title:"PC-Axis",alias:"px",owner:"RunDevelopment"},peoplecode:{title:"PeopleCode",alias:"pcode",owner:"RunDevelopment"},perl:{title:"Perl",owner:"Golmote"},php:{title:"PHP",require:"markup-templating",owner:"milesj"},phpdoc:{title:"PHPDoc",require:["php","javadoclike"],modify:"php",owner:"RunDevelopment"},"php-extras":{title:"PHP Extras",require:"php",modify:"php",owner:"milesj"},"plant-uml":{title:"PlantUML",alias:"plantuml",owner:"RunDevelopment"},plsql:{title:"PL/SQL",require:"sql",owner:"Golmote"},powerquery:{title:"PowerQuery",alias:["pq","mscript"],owner:"peterbud"},powershell:{title:"PowerShell",owner:"nauzilus"},processing:{title:"Processing",require:"clike",owner:"Golmote"},prolog:{title:"Prolog",owner:"Golmote"},promql:{title:"PromQL",owner:"arendjr"},properties:{title:".properties",owner:"Golmote"},protobuf:{title:"Protocol Buffers",require:"clike",owner:"just-boris"},pug:{title:"Pug",require:["markup","javascript"],optional:["coffeescript","ejs","handlebars","less","livescript","markdown","scss","stylus","twig"],owner:"Golmote"},puppet:{title:"Puppet",owner:"Golmote"},pure:{title:"Pure",optional:["c","cpp","fortran"],owner:"Golmote"},purebasic:{title:"PureBasic",require:"clike",alias:"pbfasm",owner:"HeX0R101"},purescript:{title:"PureScript",require:"haskell",alias:"purs",owner:"sriharshachilakapati"},python:{title:"Python",alias:"py",owner:"multipetros"},qsharp:{title:"Q#",require:"clike",alias:"qs",owner:"fedonman"},q:{title:"Q (kdb+ database)",owner:"Golmote"},qml:{title:"QML",require:"javascript",owner:"RunDevelopment"},qore:{title:"Qore",require:"clike",owner:"temnroegg"},r:{title:"R",owner:"Golmote"},racket:{title:"Racket",require:"scheme",alias:"rkt",owner:"RunDevelopment"},cshtml:{title:"Razor C#",alias:"razor",require:["markup","csharp"],optional:["css","css-extras","javascript","js-extras"],owner:"RunDevelopment"},jsx:{title:"React 
JSX",require:["markup","javascript"],optional:["jsdoc","js-extras","js-templates"],owner:"vkbansal"},tsx:{title:"React TSX",require:["jsx","typescript"]},reason:{title:"Reason",require:"clike",owner:"Golmote"},regex:{title:"Regex",owner:"RunDevelopment"},rego:{title:"Rego",owner:"JordanSh"},renpy:{title:"Ren'py",alias:"rpy",owner:"HyuchiaDiego"},rescript:{title:"ReScript",alias:"res",owner:"vmarcosp"},rest:{title:"reST (reStructuredText)",owner:"Golmote"},rip:{title:"Rip",owner:"ravinggenius"},roboconf:{title:"Roboconf",owner:"Golmote"},robotframework:{title:"Robot Framework",alias:"robot",owner:"RunDevelopment"},ruby:{title:"Ruby",require:"clike",alias:"rb",owner:"samflores"},rust:{title:"Rust",owner:"Golmote"},sas:{title:"SAS",optional:["groovy","lua","sql"],owner:"Golmote"},sass:{title:"Sass (Sass)",require:"css",optional:"css-extras",owner:"Golmote"},scss:{title:"Sass (SCSS)",require:"css",optional:"css-extras",owner:"MoOx"},scala:{title:"Scala",require:"java",owner:"jozic"},scheme:{title:"Scheme",owner:"bacchus123"},"shell-session":{title:"Shell session",require:"bash",alias:["sh-session","shellsession"],owner:"RunDevelopment"},smali:{title:"Smali",owner:"RunDevelopment"},smalltalk:{title:"Smalltalk",owner:"Golmote"},smarty:{title:"Smarty",require:"markup-templating",optional:"php",owner:"Golmote"},sml:{title:"SML",alias:"smlnj",aliasTitles:{smlnj:"SML/NJ"},owner:"RunDevelopment"},solidity:{title:"Solidity (Ethereum)",alias:"sol",require:"clike",owner:"glachaud"},"solution-file":{title:"Solution file",alias:"sln",owner:"RunDevelopment"},soy:{title:"Soy (Closure Template)",require:"markup-templating",owner:"Golmote"},sparql:{title:"SPARQL",require:"turtle",owner:"Triply-Dev",alias:"rq"},"splunk-spl":{title:"Splunk SPL",owner:"RunDevelopment"},sqf:{title:"SQF: Status Quo Function (Arma 
3)",require:"clike",owner:"RunDevelopment"},sql:{title:"SQL",owner:"multipetros"},squirrel:{title:"Squirrel",require:"clike",owner:"RunDevelopment"},stan:{title:"Stan",owner:"RunDevelopment"},stata:{title:"Stata Ado",require:["mata","java","python"],owner:"RunDevelopment"},iecst:{title:"Structured Text (IEC 61131-3)",owner:"serhioromano"},stylus:{title:"Stylus",owner:"vkbansal"},supercollider:{title:"SuperCollider",alias:"sclang",owner:"RunDevelopment"},swift:{title:"Swift",owner:"chrischares"},systemd:{title:"Systemd configuration file",owner:"RunDevelopment"},"t4-templating":{title:"T4 templating",owner:"RunDevelopment"},"t4-cs":{title:"T4 Text Templates (C#)",require:["t4-templating","csharp"],alias:"t4",owner:"RunDevelopment"},"t4-vb":{title:"T4 Text Templates (VB)",require:["t4-templating","vbnet"],owner:"RunDevelopment"},tap:{title:"TAP",owner:"isaacs",require:"yaml"},tcl:{title:"Tcl",owner:"PeterChaplin"},tt2:{title:"Template Toolkit 2",require:["clike","markup-templating"],owner:"gflohr"},textile:{title:"Textile",require:"markup",optional:"css",owner:"Golmote"},toml:{title:"TOML",owner:"RunDevelopment"},tremor:{title:"Tremor",alias:["trickle","troy"],owner:"darach",aliasTitles:{trickle:"trickle",troy:"troy"}},turtle:{title:"Turtle",alias:"trig",aliasTitles:{trig:"TriG"},owner:"jakubklimek"},twig:{title:"Twig",require:"markup-templating",owner:"brandonkelly"},typescript:{title:"TypeScript",require:"javascript",optional:"js-templates",alias:"ts",owner:"vkbansal"},typoscript:{title:"TypoScript",alias:"tsconfig",aliasTitles:{tsconfig:"TSConfig"},owner:"dkern"},unrealscript:{title:"UnrealScript",alias:["uscript","uc"],owner:"RunDevelopment"},uorazor:{title:"UO Razor 
Script",owner:"jaseowns"},uri:{title:"URI",alias:"url",aliasTitles:{url:"URL"},owner:"RunDevelopment"},v:{title:"V",require:"clike",owner:"taggon"},vala:{title:"Vala",require:"clike",optional:"regex",owner:"TemplarVolk"},vbnet:{title:"VB.Net",require:"basic",owner:"Bigsby"},velocity:{title:"Velocity",require:"markup",owner:"Golmote"},verilog:{title:"Verilog",owner:"a-rey"},vhdl:{title:"VHDL",owner:"a-rey"},vim:{title:"vim",owner:"westonganger"},"visual-basic":{title:"Visual Basic",alias:["vb","vba"],aliasTitles:{vba:"VBA"},owner:"Golmote"},warpscript:{title:"WarpScript",owner:"RunDevelopment"},wasm:{title:"WebAssembly",owner:"Golmote"},"web-idl":{title:"Web IDL",alias:"webidl",owner:"RunDevelopment"},wgsl:{title:"WGSL",owner:"Dr4gonthree"},wiki:{title:"Wiki markup",require:"markup",owner:"Golmote"},wolfram:{title:"Wolfram language",alias:["mathematica","nb","wl"],aliasTitles:{mathematica:"Mathematica",nb:"Mathematica Notebook"},owner:"msollami"},wren:{title:"Wren",owner:"clsource"},xeora:{title:"Xeora",require:"markup",alias:"xeoracube",aliasTitles:{xeoracube:"XeoraCube"},owner:"freakmaxi"},"xml-doc":{title:"XML doc (.net)",require:"markup",modify:["csharp","fsharp","vbnet"],owner:"RunDevelopment"},xojo:{title:"Xojo (REALbasic)",owner:"Golmote"},xquery:{title:"XQuery",require:"markup",owner:"Golmote"},yaml:{title:"YAML",alias:"yml",owner:"hason"},yang:{title:"YANG",owner:"RunDevelopment"},zig:{title:"Zig",owner:"RunDevelopment"}},plugins:{meta:{path:"plugins/{id}/prism-{id}",link:"plugins/{id}/"},"line-highlight":{title:"Line Highlight",description:"Highlights specific lines and/or line ranges."},"line-numbers":{title:"Line Numbers",description:"Line number at the beginning of code lines.",owner:"kuba-kubula"},"show-invisibles":{title:"Show Invisibles",description:"Show hidden characters such as tabs and line breaks.",optional:["autolinker","data-uri-highlight"]},autolinker:{title:"Autolinker",description:"Converts URLs and emails in code to clickable links. 
Parses Markdown links in comments."},wpd:{title:"WebPlatform Docs",description:'Makes tokens link to <a href="https://webplatform.github.io/docs/">WebPlatform.org documentation</a>. The links open in a new tab.'},"custom-class":{title:"Custom Class",description:"This plugin allows you to prefix Prism's default classes (<code>.comment</code> can become <code>.namespace--comment</code>) or replace them with your defined ones (like <code>.editor__comment</code>). You can even add new classes.",owner:"dvkndn",noCSS:!0},"file-highlight":{title:"File Highlight",description:"Fetch external files and highlight them with Prism. Used on the Prism website itself.",noCSS:!0},"show-language":{title:"Show Language",description:"Display the highlighted language in code blocks (inline code does not show the label).",owner:"nauzilus",noCSS:!0,require:"toolbar"},"jsonp-highlight":{title:"JSONP Highlight",description:"Fetch content with JSONP and highlight some interesting content (e.g. GitHub/Gists or Bitbucket API).",noCSS:!0,owner:"nauzilus"},"highlight-keywords":{title:"Highlight Keywords",description:"Adds special CSS classes for each keyword for fine-grained highlighting.",owner:"vkbansal",noCSS:!0},"remove-initial-line-feed":{title:"Remove initial line feed",description:"Removes the initial line feed in code blocks.",owner:"Golmote",noCSS:!0},"inline-color":{title:"Inline color",description:"Adds a small inline preview for colors in style sheets.",require:"css-extras",owner:"RunDevelopment"},previewers:{title:"Previewers",description:"Previewers for angles, colors, gradients, easing and time.",require:"css-extras",owner:"Golmote"},autoloader:{title:"Autoloader",description:"Automatically loads the needed languages to highlight the code blocks.",owner:"Golmote",noCSS:!0},"keep-markup":{title:"Keep Markup",description:"Prevents custom markup from being dropped out during highlighting.",owner:"Golmote",optional:"normalize-whitespace",noCSS:!0},"command-line":{title:"Command 
Line",description:"Display a command line with a prompt and, optionally, the output/response from the commands.",owner:"chriswells0"},"unescaped-markup":{title:"Unescaped Markup",description:"Write markup without having to escape anything."},"normalize-whitespace":{title:"Normalize Whitespace",description:"Supports multiple operations to normalize whitespace in code blocks.",owner:"zeitgeist87",optional:"unescaped-markup",noCSS:!0},"data-uri-highlight":{title:"Data-URI Highlight",description:"Highlights data-URI contents.",owner:"Golmote",noCSS:!0},toolbar:{title:"Toolbar",description:"Attach a toolbar for plugins to easily register buttons on the top of a code block.",owner:"mAAdhaTTah"},"copy-to-clipboard":{title:"Copy to Clipboard Button",description:"Add a button that copies the code block to the clipboard when clicked.",owner:"mAAdhaTTah",require:"toolbar",noCSS:!0},"download-button":{title:"Download Button",description:"A button in the toolbar of a code block adding a convenient way to download a code file.",owner:"Golmote",require:"toolbar",noCSS:!0},"match-braces":{title:"Match braces",description:"Highlights matching braces.",owner:"RunDevelopment"},"diff-highlight":{title:"Diff Highlight",description:"Highlights the code inside diff blocks.",owner:"RunDevelopment",require:"diff"},"filter-highlight-all":{title:"Filter highlightAll",description:"Filters the elements the <code>highlightAll</code> and <code>highlightAllUnder</code> methods actually highlight.",owner:"RunDevelopment",noCSS:!0},treeview:{title:"Treeview",description:"A language with special styles to highlight file system tree structures.",owner:"Golmote"}}})},2885:(e,t,n)=>{const a=n(9901),r=n(9642),o=new Set;function i(e){void 0===e?e=Object.keys(a.languages).filter((e=>"meta"!=e)):Array.isArray(e)||(e=[e]);const t=[...o,...Object.keys(Prism.languages)];r(a,e,t).load((e=>{if(!(e in a.languages))return void(i.silent||console.warn("Language does not exist: "+e));const t="./prism-"+e;delete 
n.c[n(6500).resolve(t)],delete Prism.languages[e],n(6500)(t),o.add(e)}))}i.silent=!1,e.exports=i},6726:(e,t,n)=>{var a={"./":2885};function r(e){var t=o(e);return n(t)}function o(e){if(!n.o(a,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return a[e]}r.keys=function(){return Object.keys(a)},r.resolve=o,e.exports=r,r.id=6726},6500:(e,t,n)=>{var a={"./":2885};function r(e){var t=o(e);return n(t)}function o(e){if(!n.o(a,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return a[e]}r.keys=function(){return Object.keys(a)},r.resolve=o,e.exports=r,r.id=6500},9642:e=>{"use strict";var t=function(){var e=function(){};function t(e,t){Array.isArray(e)?e.forEach(t):null!=e&&t(e,0)}function n(e){for(var t={},n=0,a=e.length;n<a;n++)t[e[n]]=!0;return t}function a(e){var n={},a=[];function r(a,o){if(!(a in n)){o.push(a);var i=o.indexOf(a);if(i<o.length-1)throw new Error("Circular dependency: "+o.slice(i).join(" -> "));var s={},l=e[a];if(l){function c(t){if(!(t in e))throw new Error(a+" depends on an unknown component "+t);if(!(t in s))for(var i in r(t,o),s[t]=!0,n[t])s[i]=!0}t(l.require,c),t(l.optional,c),t(l.modify,c)}n[a]=s,o.pop()}}return function(e){var t=n[e];return t||(r(e,a),t=n[e]),t}}function r(e){for(var t in e)return!0;return!1}return function(o,i,s){var l=function(e){var t={};for(var n in e){var a=e[n];for(var r in a)if("meta"!=r){var o=a[r];t[r]="string"==typeof o?{title:o}:o}}return t}(o),c=function(e){var n;return function(a){if(a in e)return a;if(!n)for(var r in n={},e){var o=e[r];t(o&&o.alias,(function(t){if(t in n)throw new Error(t+" cannot be alias for both "+r+" and "+n[t]);if(t in e)throw new Error(t+" cannot be alias of "+r+" because it is a component.");n[t]=r}))}return n[a]||a}}(l);i=i.map(c),s=(s||[]).map(c);var u=n(i),d=n(s);i.forEach((function e(n){var a=l[n];t(a&&a.require,(function(t){t in d||(u[t]=!0,e(t))}))}));for(var f,p=a(l),m=u;r(m);){for(var h in f={},m){var 
g=l[h];t(g&&g.modify,(function(e){e in d&&(f[e]=!0)}))}for(var _ in d)if(!(_ in u))for(var b in p(_))if(b in u){f[_]=!0;break}for(var k in m=f)u[k]=!0}var v={getIds:function(){var e=[];return v.load((function(t){e.push(t)})),e},load:function(t,n){return function(t,n,a,r){var o=r?r.series:void 0,i=r?r.parallel:e,s={},l={};function c(e){if(e in s)return s[e];l[e]=!0;var r,u=[];for(var d in t(e))d in n&&u.push(d);if(0===u.length)r=a(e);else{var f=i(u.map((function(e){var t=c(e);return delete l[e],t})));o?r=o(f,(function(){return a(e)})):a(e)}return s[e]=r}for(var u in n)c(u);var d=[];for(var f in l)d.push(s[f]);return i(d)}(p,u,t,n)}};return v}}();e.exports=t},2703:(e,t,n)=>{"use strict";var a=n(414);function r(){}function o(){}o.resetWarningCache=r,e.exports=function(){function e(e,t,n,r,o,i){if(i!==a){var s=new Error("Calling PropTypes validators directly is not supported by the `prop-types` package. Use PropTypes.checkPropTypes() to call them. Read more at http://fb.me/use-check-prop-types");throw s.name="Invariant Violation",s}}function t(){return e}e.isRequired=e;var n={array:e,bigint:e,bool:e,func:e,number:e,object:e,string:e,symbol:e,any:e,arrayOf:t,element:e,elementType:e,instanceOf:t,node:e,objectOf:t,oneOf:t,oneOfType:t,shape:t,exact:t,checkPropTypes:o,resetWarningCache:r};return n.PropTypes=n,n}},5697:(e,t,n)=>{e.exports=n(2703)()},414:e=>{"use strict";e.exports="SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED"},4448:(e,t,n)=>{"use strict";var a=n(7294),r=n(7418),o=n(3840);function i(e){for(var t="https://reactjs.org/docs/error-decoder.html?invariant="+e,n=1;n<arguments.length;n++)t+="&args[]="+encodeURIComponent(arguments[n]);return"Minified React error #"+e+"; visit "+t+" for the full message or use the non-minified dev environment for full errors and additional helpful warnings."}if(!a)throw Error(i(227));var s=new Set,l={};function c(e,t){u(e,t),u(e+"Capture",t)}function u(e,t){for(l[e]=t,e=0;e<t.length;e++)s.add(t[e])}var d=!("undefined"==typeof 
window||void 0===window.document||void 0===window.document.createElement),f=/^[:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD][:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\-.0-9\u00B7\u0300-\u036F\u203F-\u2040]*$/,p=Object.prototype.hasOwnProperty,m={},h={};function g(e,t,n,a,r,o,i){this.acceptsBooleans=2===t||3===t||4===t,this.attributeName=a,this.attributeNamespace=r,this.mustUseProperty=n,this.propertyName=e,this.type=t,this.sanitizeURL=o,this.removeEmptyString=i}var _={};"children dangerouslySetInnerHTML defaultValue defaultChecked innerHTML suppressContentEditableWarning suppressHydrationWarning style".split(" ").forEach((function(e){_[e]=new g(e,0,!1,e,null,!1,!1)})),[["acceptCharset","accept-charset"],["className","class"],["htmlFor","for"],["httpEquiv","http-equiv"]].forEach((function(e){var t=e[0];_[t]=new g(t,1,!1,e[1],null,!1,!1)})),["contentEditable","draggable","spellCheck","value"].forEach((function(e){_[e]=new g(e,2,!1,e.toLowerCase(),null,!1,!1)})),["autoReverse","externalResourcesRequired","focusable","preserveAlpha"].forEach((function(e){_[e]=new g(e,2,!1,e,null,!1,!1)})),"allowFullScreen async autoFocus autoPlay controls default defer disabled disablePictureInPicture disableRemotePlayback formNoValidate hidden loop noModule noValidate open playsInline readOnly required reversed scoped seamless itemScope".split(" ").forEach((function(e){_[e]=new g(e,3,!1,e.toLowerCase(),null,!1,!1)})),["checked","multiple","muted","selected"].forEach((function(e){_[e]=new g(e,3,!0,e,null,!1,!1)})),["capture","download"].forEach((function(e){_[e]=new g(e,4,!1,e,null,!1,!1)})),["cols","rows","size","span"].forEach((function(e){_[e]=new g(e,6,!1,e,null,!1,!1)})),["rowSpan","start"].forEach((function(e){_[e]=new g(e,5,!1,e.toLowerCase(),null,!1,!1)}));var 
b=/[\-:]([a-z])/g;function k(e){return e[1].toUpperCase()}function v(e,t,n,a){var r=_.hasOwnProperty(t)?_[t]:null;(null!==r?0===r.type:!a&&(2<t.length&&("o"===t[0]||"O"===t[0])&&("n"===t[1]||"N"===t[1])))||(function(e,t,n,a){if(null==t||function(e,t,n,a){if(null!==n&&0===n.type)return!1;switch(typeof t){case"function":case"symbol":return!0;case"boolean":return!a&&(null!==n?!n.acceptsBooleans:"data-"!==(e=e.toLowerCase().slice(0,5))&&"aria-"!==e);default:return!1}}(e,t,n,a))return!0;if(a)return!1;if(null!==n)switch(n.type){case 3:return!t;case 4:return!1===t;case 5:return isNaN(t);case 6:return isNaN(t)||1>t}return!1}(t,n,r,a)&&(n=null),a||null===r?function(e){return!!p.call(h,e)||!p.call(m,e)&&(f.test(e)?h[e]=!0:(m[e]=!0,!1))}(t)&&(null===n?e.removeAttribute(t):e.setAttribute(t,""+n)):r.mustUseProperty?e[r.propertyName]=null===n?3!==r.type&&"":n:(t=r.attributeName,a=r.attributeNamespace,null===n?e.removeAttribute(t):(n=3===(r=r.type)||4===r&&!0===n?"":""+n,a?e.setAttributeNS(a,t,n):e.setAttribute(t,n))))}"accent-height alignment-baseline arabic-form baseline-shift cap-height clip-path clip-rule color-interpolation color-interpolation-filters color-profile color-rendering dominant-baseline enable-background fill-opacity fill-rule flood-color flood-opacity font-family font-size font-size-adjust font-stretch font-style font-variant font-weight glyph-name glyph-orientation-horizontal glyph-orientation-vertical horiz-adv-x horiz-origin-x image-rendering letter-spacing lighting-color marker-end marker-mid marker-start overline-position overline-thickness paint-order panose-1 pointer-events rendering-intent shape-rendering stop-color stop-opacity strikethrough-position strikethrough-thickness stroke-dasharray stroke-dashoffset stroke-linecap stroke-linejoin stroke-miterlimit stroke-opacity stroke-width text-anchor text-decoration text-rendering underline-position underline-thickness unicode-bidi unicode-range units-per-em v-alphabetic v-hanging v-ideographic 
v-mathematical vector-effect vert-adv-y vert-origin-x vert-origin-y word-spacing writing-mode xmlns:xlink x-height".split(" ").forEach((function(e){var t=e.replace(b,k);_[t]=new g(t,1,!1,e,null,!1,!1)})),"xlink:actuate xlink:arcrole xlink:role xlink:show xlink:title xlink:type".split(" ").forEach((function(e){var t=e.replace(b,k);_[t]=new g(t,1,!1,e,"http://www.w3.org/1999/xlink",!1,!1)})),["xml:base","xml:lang","xml:space"].forEach((function(e){var t=e.replace(b,k);_[t]=new g(t,1,!1,e,"http://www.w3.org/XML/1998/namespace",!1,!1)})),["tabIndex","crossOrigin"].forEach((function(e){_[e]=new g(e,1,!1,e.toLowerCase(),null,!1,!1)})),_.xlinkHref=new g("xlinkHref",1,!1,"xlink:href","http://www.w3.org/1999/xlink",!0,!1),["src","href","action","formAction"].forEach((function(e){_[e]=new g(e,1,!1,e.toLowerCase(),null,!0,!0)}));var y=a.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED,w=60103,S=60106,E=60107,x=60108,C=60114,T=60109,A=60110,G=60112,L=60113,N=60120,R=60115,P=60116,I=60121,O=60128,B=60129,D=60130,M=60131;if("function"==typeof Symbol&&Symbol.for){var F=Symbol.for;w=F("react.element"),S=F("react.portal"),E=F("react.fragment"),x=F("react.strict_mode"),C=F("react.profiler"),T=F("react.provider"),A=F("react.context"),G=F("react.forward_ref"),L=F("react.suspense"),N=F("react.suspense_list"),R=F("react.memo"),P=F("react.lazy"),I=F("react.block"),F("react.scope"),O=F("react.opaque.id"),B=F("react.debug_trace_mode"),D=F("react.offscreen"),M=F("react.legacy_hidden")}var U,j="function"==typeof Symbol&&Symbol.iterator;function z(e){return null===e||"object"!=typeof e?null:"function"==typeof(e=j&&e[j]||e["@@iterator"])?e:null}function K(e){if(void 0===U)try{throw Error()}catch(n){var t=n.stack.trim().match(/\n( *(at )?)/);U=t&&t[1]||""}return"\n"+U+e}var $=!1;function H(e,t){if(!e||$)return"";$=!0;var n=Error.prepareStackTrace;Error.prepareStackTrace=void 0;try{if(t)if(t=function(){throw Error()},Object.defineProperty(t.prototype,"props",{set:function(){throw 
Error()}}),"object"==typeof Reflect&&Reflect.construct){try{Reflect.construct(t,[])}catch(l){var a=l}Reflect.construct(e,[],t)}else{try{t.call()}catch(l){a=l}e.call(t.prototype)}else{try{throw Error()}catch(l){a=l}e()}}catch(l){if(l&&a&&"string"==typeof l.stack){for(var r=l.stack.split("\n"),o=a.stack.split("\n"),i=r.length-1,s=o.length-1;1<=i&&0<=s&&r[i]!==o[s];)s--;for(;1<=i&&0<=s;i--,s--)if(r[i]!==o[s]){if(1!==i||1!==s)do{if(i--,0>--s||r[i]!==o[s])return"\n"+r[i].replace(" at new "," at ")}while(1<=i&&0<=s);break}}}finally{$=!1,Error.prepareStackTrace=n}return(e=e?e.displayName||e.name:"")?K(e):""}function q(e){switch(e.tag){case 5:return K(e.type);case 16:return K("Lazy");case 13:return K("Suspense");case 19:return K("SuspenseList");case 0:case 2:case 15:return e=H(e.type,!1);case 11:return e=H(e.type.render,!1);case 22:return e=H(e.type._render,!1);case 1:return e=H(e.type,!0);default:return""}}function Z(e){if(null==e)return null;if("function"==typeof e)return e.displayName||e.name||null;if("string"==typeof e)return e;switch(e){case E:return"Fragment";case S:return"Portal";case C:return"Profiler";case x:return"StrictMode";case L:return"Suspense";case N:return"SuspenseList"}if("object"==typeof e)switch(e.$$typeof){case A:return(e.displayName||"Context")+".Consumer";case T:return(e._context.displayName||"Context")+".Provider";case G:var t=e.render;return t=t.displayName||t.name||"",e.displayName||(""!==t?"ForwardRef("+t+")":"ForwardRef");case R:return Z(e.type);case I:return Z(e._render);case P:t=e._payload,e=e._init;try{return Z(e(t))}catch(n){}}return null}function W(e){switch(typeof e){case"boolean":case"number":case"object":case"string":case"undefined":return e;default:return""}}function V(e){var t=e.type;return(e=e.nodeName)&&"input"===e.toLowerCase()&&("checkbox"===t||"radio"===t)}function Y(e){e._valueTracker||(e._valueTracker=function(e){var 
t=V(e)?"checked":"value",n=Object.getOwnPropertyDescriptor(e.constructor.prototype,t),a=""+e[t];if(!e.hasOwnProperty(t)&&void 0!==n&&"function"==typeof n.get&&"function"==typeof n.set){var r=n.get,o=n.set;return Object.defineProperty(e,t,{configurable:!0,get:function(){return r.call(this)},set:function(e){a=""+e,o.call(this,e)}}),Object.defineProperty(e,t,{enumerable:n.enumerable}),{getValue:function(){return a},setValue:function(e){a=""+e},stopTracking:function(){e._valueTracker=null,delete e[t]}}}}(e))}function Q(e){if(!e)return!1;var t=e._valueTracker;if(!t)return!0;var n=t.getValue(),a="";return e&&(a=V(e)?e.checked?"true":"false":e.value),(e=a)!==n&&(t.setValue(e),!0)}function X(e){if(void 0===(e=e||("undefined"!=typeof document?document:void 0)))return null;try{return e.activeElement||e.body}catch(t){return e.body}}function J(e,t){var n=t.checked;return r({},t,{defaultChecked:void 0,defaultValue:void 0,value:void 0,checked:null!=n?n:e._wrapperState.initialChecked})}function ee(e,t){var n=null==t.defaultValue?"":t.defaultValue,a=null!=t.checked?t.checked:t.defaultChecked;n=W(null!=t.value?t.value:n),e._wrapperState={initialChecked:a,initialValue:n,controlled:"checkbox"===t.type||"radio"===t.type?null!=t.checked:null!=t.value}}function te(e,t){null!=(t=t.checked)&&v(e,"checked",t,!1)}function ne(e,t){te(e,t);var n=W(t.value),a=t.type;if(null!=n)"number"===a?(0===n&&""===e.value||e.value!=n)&&(e.value=""+n):e.value!==""+n&&(e.value=""+n);else if("submit"===a||"reset"===a)return void e.removeAttribute("value");t.hasOwnProperty("value")?re(e,t.type,n):t.hasOwnProperty("defaultValue")&&re(e,t.type,W(t.defaultValue)),null==t.checked&&null!=t.defaultChecked&&(e.defaultChecked=!!t.defaultChecked)}function ae(e,t,n){if(t.hasOwnProperty("value")||t.hasOwnProperty("defaultValue")){var a=t.type;if(!("submit"!==a&&"reset"!==a||void 
0!==t.value&&null!==t.value))return;t=""+e._wrapperState.initialValue,n||t===e.value||(e.value=t),e.defaultValue=t}""!==(n=e.name)&&(e.name=""),e.defaultChecked=!!e._wrapperState.initialChecked,""!==n&&(e.name=n)}function re(e,t,n){"number"===t&&X(e.ownerDocument)===e||(null==n?e.defaultValue=""+e._wrapperState.initialValue:e.defaultValue!==""+n&&(e.defaultValue=""+n))}function oe(e,t){return e=r({children:void 0},t),(t=function(e){var t="";return a.Children.forEach(e,(function(e){null!=e&&(t+=e)})),t}(t.children))&&(e.children=t),e}function ie(e,t,n,a){if(e=e.options,t){t={};for(var r=0;r<n.length;r++)t["$"+n[r]]=!0;for(n=0;n<e.length;n++)r=t.hasOwnProperty("$"+e[n].value),e[n].selected!==r&&(e[n].selected=r),r&&a&&(e[n].defaultSelected=!0)}else{for(n=""+W(n),t=null,r=0;r<e.length;r++){if(e[r].value===n)return e[r].selected=!0,void(a&&(e[r].defaultSelected=!0));null!==t||e[r].disabled||(t=e[r])}null!==t&&(t.selected=!0)}}function se(e,t){if(null!=t.dangerouslySetInnerHTML)throw Error(i(91));return r({},t,{value:void 0,defaultValue:void 0,children:""+e._wrapperState.initialValue})}function le(e,t){var n=t.value;if(null==n){if(n=t.children,t=t.defaultValue,null!=n){if(null!=t)throw Error(i(92));if(Array.isArray(n)){if(!(1>=n.length))throw Error(i(93));n=n[0]}t=n}null==t&&(t=""),n=t}e._wrapperState={initialValue:W(n)}}function ce(e,t){var n=W(t.value),a=W(t.defaultValue);null!=n&&((n=""+n)!==e.value&&(e.value=n),null==t.defaultValue&&e.defaultValue!==n&&(e.defaultValue=n)),null!=a&&(e.defaultValue=""+a)}function ue(e){var t=e.textContent;t===e._wrapperState.initialValue&&""!==t&&null!==t&&(e.value=t)}var de={html:"http://www.w3.org/1999/xhtml",mathml:"http://www.w3.org/1998/Math/MathML",svg:"http://www.w3.org/2000/svg"};function fe(e){switch(e){case"svg":return"http://www.w3.org/2000/svg";case"math":return"http://www.w3.org/1998/Math/MathML";default:return"http://www.w3.org/1999/xhtml"}}function pe(e,t){return 
null==e||"http://www.w3.org/1999/xhtml"===e?fe(t):"http://www.w3.org/2000/svg"===e&&"foreignObject"===t?"http://www.w3.org/1999/xhtml":e}var me,he,ge=(he=function(e,t){if(e.namespaceURI!==de.svg||"innerHTML"in e)e.innerHTML=t;else{for((me=me||document.createElement("div")).innerHTML="<svg>"+t.valueOf().toString()+"</svg>",t=me.firstChild;e.firstChild;)e.removeChild(e.firstChild);for(;t.firstChild;)e.appendChild(t.firstChild)}},"undefined"!=typeof MSApp&&MSApp.execUnsafeLocalFunction?function(e,t,n,a){MSApp.execUnsafeLocalFunction((function(){return he(e,t)}))}:he);function _e(e,t){if(t){var n=e.firstChild;if(n&&n===e.lastChild&&3===n.nodeType)return void(n.nodeValue=t)}e.textContent=t}var be={animationIterationCount:!0,borderImageOutset:!0,borderImageSlice:!0,borderImageWidth:!0,boxFlex:!0,boxFlexGroup:!0,boxOrdinalGroup:!0,columnCount:!0,columns:!0,flex:!0,flexGrow:!0,flexPositive:!0,flexShrink:!0,flexNegative:!0,flexOrder:!0,gridArea:!0,gridRow:!0,gridRowEnd:!0,gridRowSpan:!0,gridRowStart:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnSpan:!0,gridColumnStart:!0,fontWeight:!0,lineClamp:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,tabSize:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,floodOpacity:!0,stopOpacity:!0,strokeDasharray:!0,strokeDashoffset:!0,strokeMiterlimit:!0,strokeOpacity:!0,strokeWidth:!0},ke=["Webkit","ms","Moz","O"];function ve(e,t,n){return null==t||"boolean"==typeof t||""===t?"":n||"number"!=typeof t||0===t||be.hasOwnProperty(e)&&be[e]?(""+t).trim():t+"px"}function ye(e,t){for(var n in e=e.style,t)if(t.hasOwnProperty(n)){var a=0===n.indexOf("--"),r=ve(n,t[n],a);"float"===n&&(n="cssFloat"),a?e.setProperty(n,r):e[n]=r}}Object.keys(be).forEach((function(e){ke.forEach((function(t){t=t+e.charAt(0).toUpperCase()+e.substring(1),be[t]=be[e]}))}));var we=r({menuitem:!0},{area:!0,base:!0,br:!0,col:!0,embed:!0,hr:!0,img:!0,input:!0,keygen:!0,link:!0,meta:!0,param:!0,source:!0,track:!0,wbr:!0});function 
Se(e,t){if(t){if(we[e]&&(null!=t.children||null!=t.dangerouslySetInnerHTML))throw Error(i(137,e));if(null!=t.dangerouslySetInnerHTML){if(null!=t.children)throw Error(i(60));if("object"!=typeof t.dangerouslySetInnerHTML||!("__html"in t.dangerouslySetInnerHTML))throw Error(i(61))}if(null!=t.style&&"object"!=typeof t.style)throw Error(i(62))}}function Ee(e,t){if(-1===e.indexOf("-"))return"string"==typeof t.is;switch(e){case"annotation-xml":case"color-profile":case"font-face":case"font-face-src":case"font-face-uri":case"font-face-format":case"font-face-name":case"missing-glyph":return!1;default:return!0}}function xe(e){return(e=e.target||e.srcElement||window).correspondingUseElement&&(e=e.correspondingUseElement),3===e.nodeType?e.parentNode:e}var Ce=null,Te=null,Ae=null;function Ge(e){if(e=nr(e)){if("function"!=typeof Ce)throw Error(i(280));var t=e.stateNode;t&&(t=rr(t),Ce(e.stateNode,e.type,t))}}function Le(e){Te?Ae?Ae.push(e):Ae=[e]:Te=e}function Ne(){if(Te){var e=Te,t=Ae;if(Ae=Te=null,Ge(e),t)for(e=0;e<t.length;e++)Ge(t[e])}}function Re(e,t){return e(t)}function Pe(e,t,n,a,r){return e(t,n,a,r)}function Ie(){}var Oe=Re,Be=!1,De=!1;function Me(){null===Te&&null===Ae||(Ie(),Ne())}function Fe(e,t){var n=e.stateNode;if(null===n)return null;var a=rr(n);if(null===a)return null;n=a[t];e:switch(t){case"onClick":case"onClickCapture":case"onDoubleClick":case"onDoubleClickCapture":case"onMouseDown":case"onMouseDownCapture":case"onMouseMove":case"onMouseMoveCapture":case"onMouseUp":case"onMouseUpCapture":case"onMouseEnter":(a=!a.disabled)||(a=!("button"===(e=e.type)||"input"===e||"select"===e||"textarea"===e)),e=!a;break e;default:e=!1}if(e)return null;if(n&&"function"!=typeof n)throw Error(i(231,t,typeof n));return n}var Ue=!1;if(d)try{var je={};Object.defineProperty(je,"passive",{get:function(){Ue=!0}}),window.addEventListener("test",je,je),window.removeEventListener("test",je,je)}catch(he){Ue=!1}function ze(e,t,n,a,r,o,i,s,l){var 
c=Array.prototype.slice.call(arguments,3);try{t.apply(n,c)}catch(u){this.onError(u)}}var Ke=!1,$e=null,He=!1,qe=null,Ze={onError:function(e){Ke=!0,$e=e}};function We(e,t,n,a,r,o,i,s,l){Ke=!1,$e=null,ze.apply(Ze,arguments)}function Ve(e){var t=e,n=e;if(e.alternate)for(;t.return;)t=t.return;else{e=t;do{0!=(1026&(t=e).flags)&&(n=t.return),e=t.return}while(e)}return 3===t.tag?n:null}function Ye(e){if(13===e.tag){var t=e.memoizedState;if(null===t&&(null!==(e=e.alternate)&&(t=e.memoizedState)),null!==t)return t.dehydrated}return null}function Qe(e){if(Ve(e)!==e)throw Error(i(188))}function Xe(e){if(e=function(e){var t=e.alternate;if(!t){if(null===(t=Ve(e)))throw Error(i(188));return t!==e?null:e}for(var n=e,a=t;;){var r=n.return;if(null===r)break;var o=r.alternate;if(null===o){if(null!==(a=r.return)){n=a;continue}break}if(r.child===o.child){for(o=r.child;o;){if(o===n)return Qe(r),e;if(o===a)return Qe(r),t;o=o.sibling}throw Error(i(188))}if(n.return!==a.return)n=r,a=o;else{for(var s=!1,l=r.child;l;){if(l===n){s=!0,n=r,a=o;break}if(l===a){s=!0,a=r,n=o;break}l=l.sibling}if(!s){for(l=o.child;l;){if(l===n){s=!0,n=o,a=r;break}if(l===a){s=!0,a=o,n=r;break}l=l.sibling}if(!s)throw Error(i(189))}}if(n.alternate!==a)throw Error(i(190))}if(3!==n.tag)throw Error(i(188));return n.stateNode.current===n?e:t}(e),!e)return null;for(var t=e;;){if(5===t.tag||6===t.tag)return t;if(t.child)t.child.return=t,t=t.child;else{if(t===e)break;for(;!t.sibling;){if(!t.return||t.return===e)return null;t=t.return}t.sibling.return=t.return,t=t.sibling}}return null}function Je(e,t){for(var n=e.alternate;null!==t;){if(t===e||t===n)return!0;t=t.return}return!1}var et,tt,nt,at,rt=!1,ot=[],it=null,st=null,lt=null,ct=new Map,ut=new Map,dt=[],ft="mousedown mouseup touchcancel touchend touchstart auxclick dblclick pointercancel pointerdown pointerup dragend dragstart drop compositionend compositionstart keydown keypress keyup input textInput copy cut paste click change contextmenu reset submit".split(" 
");function pt(e,t,n,a,r){return{blockedOn:e,domEventName:t,eventSystemFlags:16|n,nativeEvent:r,targetContainers:[a]}}function mt(e,t){switch(e){case"focusin":case"focusout":it=null;break;case"dragenter":case"dragleave":st=null;break;case"mouseover":case"mouseout":lt=null;break;case"pointerover":case"pointerout":ct.delete(t.pointerId);break;case"gotpointercapture":case"lostpointercapture":ut.delete(t.pointerId)}}function ht(e,t,n,a,r,o){return null===e||e.nativeEvent!==o?(e=pt(t,n,a,r,o),null!==t&&(null!==(t=nr(t))&&tt(t)),e):(e.eventSystemFlags|=a,t=e.targetContainers,null!==r&&-1===t.indexOf(r)&&t.push(r),e)}function gt(e){var t=tr(e.target);if(null!==t){var n=Ve(t);if(null!==n)if(13===(t=n.tag)){if(null!==(t=Ye(n)))return e.blockedOn=t,void at(e.lanePriority,(function(){o.unstable_runWithPriority(e.priority,(function(){nt(n)}))}))}else if(3===t&&n.stateNode.hydrate)return void(e.blockedOn=3===n.tag?n.stateNode.containerInfo:null)}e.blockedOn=null}function _t(e){if(null!==e.blockedOn)return!1;for(var t=e.targetContainers;0<t.length;){var n=Xt(e.domEventName,e.eventSystemFlags,t[0],e.nativeEvent);if(null!==n)return null!==(t=nr(n))&&tt(t),e.blockedOn=n,!1;t.shift()}return!0}function bt(e,t,n){_t(e)&&n.delete(t)}function kt(){for(rt=!1;0<ot.length;){var e=ot[0];if(null!==e.blockedOn){null!==(e=nr(e.blockedOn))&&et(e);break}for(var t=e.targetContainers;0<t.length;){var n=Xt(e.domEventName,e.eventSystemFlags,t[0],e.nativeEvent);if(null!==n){e.blockedOn=n;break}t.shift()}null===e.blockedOn&&ot.shift()}null!==it&&_t(it)&&(it=null),null!==st&&_t(st)&&(st=null),null!==lt&&_t(lt)&&(lt=null),ct.forEach(bt),ut.forEach(bt)}function vt(e,t){e.blockedOn===t&&(e.blockedOn=null,rt||(rt=!0,o.unstable_scheduleCallback(o.unstable_NormalPriority,kt)))}function yt(e){function t(t){return vt(t,e)}if(0<ot.length){vt(ot[0],e);for(var n=1;n<ot.length;n++){var 
a=ot[n];a.blockedOn===e&&(a.blockedOn=null)}}for(null!==it&&vt(it,e),null!==st&&vt(st,e),null!==lt&&vt(lt,e),ct.forEach(t),ut.forEach(t),n=0;n<dt.length;n++)(a=dt[n]).blockedOn===e&&(a.blockedOn=null);for(;0<dt.length&&null===(n=dt[0]).blockedOn;)gt(n),null===n.blockedOn&&dt.shift()}function wt(e,t){var n={};return n[e.toLowerCase()]=t.toLowerCase(),n["Webkit"+e]="webkit"+t,n["Moz"+e]="moz"+t,n}var St={animationend:wt("Animation","AnimationEnd"),animationiteration:wt("Animation","AnimationIteration"),animationstart:wt("Animation","AnimationStart"),transitionend:wt("Transition","TransitionEnd")},Et={},xt={};function Ct(e){if(Et[e])return Et[e];if(!St[e])return e;var t,n=St[e];for(t in n)if(n.hasOwnProperty(t)&&t in xt)return Et[e]=n[t];return e}d&&(xt=document.createElement("div").style,"AnimationEvent"in window||(delete St.animationend.animation,delete St.animationiteration.animation,delete St.animationstart.animation),"TransitionEvent"in window||delete St.transitionend.transition);var Tt=Ct("animationend"),At=Ct("animationiteration"),Gt=Ct("animationstart"),Lt=Ct("transitionend"),Nt=new Map,Rt=new Map,Pt=["abort","abort",Tt,"animationEnd",At,"animationIteration",Gt,"animationStart","canplay","canPlay","canplaythrough","canPlayThrough","durationchange","durationChange","emptied","emptied","encrypted","encrypted","ended","ended","error","error","gotpointercapture","gotPointerCapture","load","load","loadeddata","loadedData","loadedmetadata","loadedMetadata","loadstart","loadStart","lostpointercapture","lostPointerCapture","playing","playing","progress","progress","seeking","seeking","stalled","stalled","suspend","suspend","timeupdate","timeUpdate",Lt,"transitionEnd","waiting","waiting"];function It(e,t){for(var n=0;n<e.length;n+=2){var a=e[n],r=e[n+1];r="on"+(r[0].toUpperCase()+r.slice(1)),Rt.set(a,t),Nt.set(a,r),c(r,[a])}}(0,o.unstable_now)();var Ot=8;function Bt(e){if(0!=(1&e))return Ot=15,1;if(0!=(2&e))return Ot=14,2;if(0!=(4&e))return Ot=13,4;var t=24&e;return 
0!==t?(Ot=12,t):0!=(32&e)?(Ot=11,32):0!==(t=192&e)?(Ot=10,t):0!=(256&e)?(Ot=9,256):0!==(t=3584&e)?(Ot=8,t):0!=(4096&e)?(Ot=7,4096):0!==(t=4186112&e)?(Ot=6,t):0!==(t=62914560&e)?(Ot=5,t):67108864&e?(Ot=4,67108864):0!=(134217728&e)?(Ot=3,134217728):0!==(t=805306368&e)?(Ot=2,t):0!=(1073741824&e)?(Ot=1,1073741824):(Ot=8,e)}function Dt(e,t){var n=e.pendingLanes;if(0===n)return Ot=0;var a=0,r=0,o=e.expiredLanes,i=e.suspendedLanes,s=e.pingedLanes;if(0!==o)a=o,r=Ot=15;else if(0!==(o=134217727&n)){var l=o&~i;0!==l?(a=Bt(l),r=Ot):0!==(s&=o)&&(a=Bt(s),r=Ot)}else 0!==(o=n&~i)?(a=Bt(o),r=Ot):0!==s&&(a=Bt(s),r=Ot);if(0===a)return 0;if(a=n&((0>(a=31-Kt(a))?0:1<<a)<<1)-1,0!==t&&t!==a&&0==(t&i)){if(Bt(t),r<=Ot)return t;Ot=r}if(0!==(t=e.entangledLanes))for(e=e.entanglements,t&=a;0<t;)r=1<<(n=31-Kt(t)),a|=e[n],t&=~r;return a}function Mt(e){return 0!==(e=-1073741825&e.pendingLanes)?e:1073741824&e?1073741824:0}function Ft(e,t){switch(e){case 15:return 1;case 14:return 2;case 12:return 0===(e=Ut(24&~t))?Ft(10,t):e;case 10:return 0===(e=Ut(192&~t))?Ft(8,t):e;case 8:return 0===(e=Ut(3584&~t))&&(0===(e=Ut(4186112&~t))&&(e=512)),e;case 2:return 0===(t=Ut(805306368&~t))&&(t=268435456),t}throw Error(i(358,e))}function Ut(e){return e&-e}function jt(e){for(var t=[],n=0;31>n;n++)t.push(e);return t}function zt(e,t,n){e.pendingLanes|=t;var a=t-1;e.suspendedLanes&=a,e.pingedLanes&=a,(e=e.eventTimes)[t=31-Kt(t)]=n}var Kt=Math.clz32?Math.clz32:function(e){return 0===e?32:31-($t(e)/Ht|0)|0},$t=Math.log,Ht=Math.LN2;var qt=o.unstable_UserBlockingPriority,Zt=o.unstable_runWithPriority,Wt=!0;function Vt(e,t,n,a){Be||Ie();var r=Qt,o=Be;Be=!0;try{Pe(r,e,t,n,a)}finally{(Be=o)||Me()}}function Yt(e,t,n,a){Zt(qt,Qt.bind(null,e,t,n,a))}function Qt(e,t,n,a){var r;if(Wt)if((r=0==(4&t))&&0<ot.length&&-1<ft.indexOf(e))e=pt(null,e,t,n,a),ot.push(e);else{var o=Xt(e,t,n,a);if(null===o)r&&mt(e,a);else{if(r){if(-1<ft.indexOf(e))return e=pt(o,e,t,n,a),void ot.push(e);if(function(e,t,n,a,r){switch(t){case"focusin":return 
it=ht(it,e,t,n,a,r),!0;case"dragenter":return st=ht(st,e,t,n,a,r),!0;case"mouseover":return lt=ht(lt,e,t,n,a,r),!0;case"pointerover":var o=r.pointerId;return ct.set(o,ht(ct.get(o)||null,e,t,n,a,r)),!0;case"gotpointercapture":return o=r.pointerId,ut.set(o,ht(ut.get(o)||null,e,t,n,a,r)),!0}return!1}(o,e,t,n,a))return;mt(e,a)}Ia(e,t,a,null,n)}}}function Xt(e,t,n,a){var r=xe(a);if(null!==(r=tr(r))){var o=Ve(r);if(null===o)r=null;else{var i=o.tag;if(13===i){if(null!==(r=Ye(o)))return r;r=null}else if(3===i){if(o.stateNode.hydrate)return 3===o.tag?o.stateNode.containerInfo:null;r=null}else o!==r&&(r=null)}}return Ia(e,t,a,r,n),null}var Jt=null,en=null,tn=null;function nn(){if(tn)return tn;var e,t,n=en,a=n.length,r="value"in Jt?Jt.value:Jt.textContent,o=r.length;for(e=0;e<a&&n[e]===r[e];e++);var i=a-e;for(t=1;t<=i&&n[a-t]===r[o-t];t++);return tn=r.slice(e,1<t?1-t:void 0)}function an(e){var t=e.keyCode;return"charCode"in e?0===(e=e.charCode)&&13===t&&(e=13):e=t,10===e&&(e=13),32<=e||13===e?e:0}function rn(){return!0}function on(){return!1}function sn(e){function t(t,n,a,r,o){for(var i in this._reactName=t,this._targetInst=a,this.type=n,this.nativeEvent=r,this.target=o,this.currentTarget=null,e)e.hasOwnProperty(i)&&(t=e[i],this[i]=t?t(r):r[i]);return this.isDefaultPrevented=(null!=r.defaultPrevented?r.defaultPrevented:!1===r.returnValue)?rn:on,this.isPropagationStopped=on,this}return r(t.prototype,{preventDefault:function(){this.defaultPrevented=!0;var e=this.nativeEvent;e&&(e.preventDefault?e.preventDefault():"unknown"!=typeof e.returnValue&&(e.returnValue=!1),this.isDefaultPrevented=rn)},stopPropagation:function(){var e=this.nativeEvent;e&&(e.stopPropagation?e.stopPropagation():"unknown"!=typeof e.cancelBubble&&(e.cancelBubble=!0),this.isPropagationStopped=rn)},persist:function(){},isPersistent:rn}),t}var ln,cn,un,dn={eventPhase:0,bubbles:0,cancelable:0,timeStamp:function(e){return 
e.timeStamp||Date.now()},defaultPrevented:0,isTrusted:0},fn=sn(dn),pn=r({},dn,{view:0,detail:0}),mn=sn(pn),hn=r({},pn,{screenX:0,screenY:0,clientX:0,clientY:0,pageX:0,pageY:0,ctrlKey:0,shiftKey:0,altKey:0,metaKey:0,getModifierState:Tn,button:0,buttons:0,relatedTarget:function(e){return void 0===e.relatedTarget?e.fromElement===e.srcElement?e.toElement:e.fromElement:e.relatedTarget},movementX:function(e){return"movementX"in e?e.movementX:(e!==un&&(un&&"mousemove"===e.type?(ln=e.screenX-un.screenX,cn=e.screenY-un.screenY):cn=ln=0,un=e),ln)},movementY:function(e){return"movementY"in e?e.movementY:cn}}),gn=sn(hn),_n=sn(r({},hn,{dataTransfer:0})),bn=sn(r({},pn,{relatedTarget:0})),kn=sn(r({},dn,{animationName:0,elapsedTime:0,pseudoElement:0})),vn=r({},dn,{clipboardData:function(e){return"clipboardData"in e?e.clipboardData:window.clipboardData}}),yn=sn(vn),wn=sn(r({},dn,{data:0})),Sn={Esc:"Escape",Spacebar:" ",Left:"ArrowLeft",Up:"ArrowUp",Right:"ArrowRight",Down:"ArrowDown",Del:"Delete",Win:"OS",Menu:"ContextMenu",Apps:"ContextMenu",Scroll:"ScrollLock",MozPrintableKey:"Unidentified"},En={8:"Backspace",9:"Tab",12:"Clear",13:"Enter",16:"Shift",17:"Control",18:"Alt",19:"Pause",20:"CapsLock",27:"Escape",32:" ",33:"PageUp",34:"PageDown",35:"End",36:"Home",37:"ArrowLeft",38:"ArrowUp",39:"ArrowRight",40:"ArrowDown",45:"Insert",46:"Delete",112:"F1",113:"F2",114:"F3",115:"F4",116:"F5",117:"F6",118:"F7",119:"F8",120:"F9",121:"F10",122:"F11",123:"F12",144:"NumLock",145:"ScrollLock",224:"Meta"},xn={Alt:"altKey",Control:"ctrlKey",Meta:"metaKey",Shift:"shiftKey"};function Cn(e){var t=this.nativeEvent;return t.getModifierState?t.getModifierState(e):!!(e=xn[e])&&!!t[e]}function Tn(){return Cn}var An=r({},pn,{key:function(e){if(e.key){var t=Sn[e.key]||e.key;if("Unidentified"!==t)return 
t}return"keypress"===e.type?13===(e=an(e))?"Enter":String.fromCharCode(e):"keydown"===e.type||"keyup"===e.type?En[e.keyCode]||"Unidentified":""},code:0,location:0,ctrlKey:0,shiftKey:0,altKey:0,metaKey:0,repeat:0,locale:0,getModifierState:Tn,charCode:function(e){return"keypress"===e.type?an(e):0},keyCode:function(e){return"keydown"===e.type||"keyup"===e.type?e.keyCode:0},which:function(e){return"keypress"===e.type?an(e):"keydown"===e.type||"keyup"===e.type?e.keyCode:0}}),Gn=sn(An),Ln=sn(r({},hn,{pointerId:0,width:0,height:0,pressure:0,tangentialPressure:0,tiltX:0,tiltY:0,twist:0,pointerType:0,isPrimary:0})),Nn=sn(r({},pn,{touches:0,targetTouches:0,changedTouches:0,altKey:0,metaKey:0,ctrlKey:0,shiftKey:0,getModifierState:Tn})),Rn=sn(r({},dn,{propertyName:0,elapsedTime:0,pseudoElement:0})),Pn=r({},hn,{deltaX:function(e){return"deltaX"in e?e.deltaX:"wheelDeltaX"in e?-e.wheelDeltaX:0},deltaY:function(e){return"deltaY"in e?e.deltaY:"wheelDeltaY"in e?-e.wheelDeltaY:"wheelDelta"in e?-e.wheelDelta:0},deltaZ:0,deltaMode:0}),In=sn(Pn),On=[9,13,27,32],Bn=d&&"CompositionEvent"in window,Dn=null;d&&"documentMode"in document&&(Dn=document.documentMode);var Mn=d&&"TextEvent"in window&&!Dn,Fn=d&&(!Bn||Dn&&8<Dn&&11>=Dn),Un=String.fromCharCode(32),jn=!1;function zn(e,t){switch(e){case"keyup":return-1!==On.indexOf(t.keyCode);case"keydown":return 229!==t.keyCode;case"keypress":case"mousedown":case"focusout":return!0;default:return!1}}function Kn(e){return"object"==typeof(e=e.detail)&&"data"in e?e.data:null}var $n=!1;var Hn={color:!0,date:!0,datetime:!0,"datetime-local":!0,email:!0,month:!0,number:!0,password:!0,range:!0,search:!0,tel:!0,text:!0,time:!0,url:!0,week:!0};function qn(e){var t=e&&e.nodeName&&e.nodeName.toLowerCase();return"input"===t?!!Hn[e.type]:"textarea"===t}function Zn(e,t,n,a){Le(a),0<(t=Ba(t,"onChange")).length&&(n=new fn("onChange","change",null,n,a),e.push({event:n,listeners:t}))}var Wn=null,Vn=null;function Yn(e){Aa(e,0)}function Qn(e){if(Q(ar(e)))return e}function 
Xn(e,t){if("change"===e)return t}var Jn=!1;if(d){var ea;if(d){var ta="oninput"in document;if(!ta){var na=document.createElement("div");na.setAttribute("oninput","return;"),ta="function"==typeof na.oninput}ea=ta}else ea=!1;Jn=ea&&(!document.documentMode||9<document.documentMode)}function aa(){Wn&&(Wn.detachEvent("onpropertychange",ra),Vn=Wn=null)}function ra(e){if("value"===e.propertyName&&Qn(Vn)){var t=[];if(Zn(t,Vn,e,xe(e)),e=Yn,Be)e(t);else{Be=!0;try{Re(e,t)}finally{Be=!1,Me()}}}}function oa(e,t,n){"focusin"===e?(aa(),Vn=n,(Wn=t).attachEvent("onpropertychange",ra)):"focusout"===e&&aa()}function ia(e){if("selectionchange"===e||"keyup"===e||"keydown"===e)return Qn(Vn)}function sa(e,t){if("click"===e)return Qn(t)}function la(e,t){if("input"===e||"change"===e)return Qn(t)}var ca="function"==typeof Object.is?Object.is:function(e,t){return e===t&&(0!==e||1/e==1/t)||e!=e&&t!=t},ua=Object.prototype.hasOwnProperty;function da(e,t){if(ca(e,t))return!0;if("object"!=typeof e||null===e||"object"!=typeof t||null===t)return!1;var n=Object.keys(e),a=Object.keys(t);if(n.length!==a.length)return!1;for(a=0;a<n.length;a++)if(!ua.call(t,n[a])||!ca(e[n[a]],t[n[a]]))return!1;return!0}function fa(e){for(;e&&e.firstChild;)e=e.firstChild;return e}function pa(e,t){var n,a=fa(e);for(e=0;a;){if(3===a.nodeType){if(n=e+a.textContent.length,e<=t&&n>=t)return{node:a,offset:t-e};e=n}e:{for(;a;){if(a.nextSibling){a=a.nextSibling;break e}a=a.parentNode}a=void 0}a=fa(a)}}function ma(e,t){return!(!e||!t)&&(e===t||(!e||3!==e.nodeType)&&(t&&3===t.nodeType?ma(e,t.parentNode):"contains"in e?e.contains(t):!!e.compareDocumentPosition&&!!(16&e.compareDocumentPosition(t))))}function ha(){for(var e=window,t=X();t instanceof e.HTMLIFrameElement;){try{var n="string"==typeof t.contentWindow.location.href}catch(a){n=!1}if(!n)break;t=X((e=t.contentWindow).document)}return t}function ga(e){var t=e&&e.nodeName&&e.nodeName.toLowerCase();return 
t&&("input"===t&&("text"===e.type||"search"===e.type||"tel"===e.type||"url"===e.type||"password"===e.type)||"textarea"===t||"true"===e.contentEditable)}var _a=d&&"documentMode"in document&&11>=document.documentMode,ba=null,ka=null,va=null,ya=!1;function wa(e,t,n){var a=n.window===n?n.document:9===n.nodeType?n:n.ownerDocument;ya||null==ba||ba!==X(a)||("selectionStart"in(a=ba)&&ga(a)?a={start:a.selectionStart,end:a.selectionEnd}:a={anchorNode:(a=(a.ownerDocument&&a.ownerDocument.defaultView||window).getSelection()).anchorNode,anchorOffset:a.anchorOffset,focusNode:a.focusNode,focusOffset:a.focusOffset},va&&da(va,a)||(va=a,0<(a=Ba(ka,"onSelect")).length&&(t=new fn("onSelect","select",null,t,n),e.push({event:t,listeners:a}),t.target=ba)))}It("cancel cancel click click close close contextmenu contextMenu copy copy cut cut auxclick auxClick dblclick doubleClick dragend dragEnd dragstart dragStart drop drop focusin focus focusout blur input input invalid invalid keydown keyDown keypress keyPress keyup keyUp mousedown mouseDown mouseup mouseUp paste paste pause pause play play pointercancel pointerCancel pointerdown pointerDown pointerup pointerUp ratechange rateChange reset reset seeked seeked submit submit touchcancel touchCancel touchend touchEnd touchstart touchStart volumechange volumeChange".split(" "),0),It("drag drag dragenter dragEnter dragexit dragExit dragleave dragLeave dragover dragOver mousemove mouseMove mouseout mouseOut mouseover mouseOver pointermove pointerMove pointerout pointerOut pointerover pointerOver scroll scroll toggle toggle touchmove touchMove wheel wheel".split(" "),1),It(Pt,2);for(var Sa="change selectionchange textInput compositionstart compositionend compositionupdate".split(" "),Ea=0;Ea<Sa.length;Ea++)Rt.set(Sa[Ea],0);u("onMouseEnter",["mouseout","mouseover"]),u("onMouseLeave",["mouseout","mouseover"]),u("onPointerEnter",["pointerout","pointerover"]),u("onPointerLeave",["pointerout","pointerover"]),c("onChange","change click focusin 
focusout input keydown keyup selectionchange".split(" ")),c("onSelect","focusout contextmenu dragend focusin keydown keyup mousedown mouseup selectionchange".split(" ")),c("onBeforeInput",["compositionend","keypress","textInput","paste"]),c("onCompositionEnd","compositionend focusout keydown keypress keyup mousedown".split(" ")),c("onCompositionStart","compositionstart focusout keydown keypress keyup mousedown".split(" ")),c("onCompositionUpdate","compositionupdate focusout keydown keypress keyup mousedown".split(" "));var xa="abort canplay canplaythrough durationchange emptied encrypted ended error loadeddata loadedmetadata loadstart pause play playing progress ratechange seeked seeking stalled suspend timeupdate volumechange waiting".split(" "),Ca=new Set("cancel close invalid load scroll toggle".split(" ").concat(xa));function Ta(e,t,n){var a=e.type||"unknown-event";e.currentTarget=n,function(e,t,n,a,r,o,s,l,c){if(We.apply(this,arguments),Ke){if(!Ke)throw Error(i(198));var u=$e;Ke=!1,$e=null,He||(He=!0,qe=u)}}(a,t,void 0,e),e.currentTarget=null}function Aa(e,t){t=0!=(4&t);for(var n=0;n<e.length;n++){var a=e[n],r=a.event;a=a.listeners;e:{var o=void 0;if(t)for(var i=a.length-1;0<=i;i--){var s=a[i],l=s.instance,c=s.currentTarget;if(s=s.listener,l!==o&&r.isPropagationStopped())break e;Ta(r,s,c),o=l}else for(i=0;i<a.length;i++){if(l=(s=a[i]).instance,c=s.currentTarget,s=s.listener,l!==o&&r.isPropagationStopped())break e;Ta(r,s,c),o=l}}}if(He)throw e=qe,He=!1,qe=null,e}function Ga(e,t){var n=or(t),a=e+"__bubble";n.has(a)||(Pa(t,e,2,!1),n.add(a))}var La="_reactListening"+Math.random().toString(36).slice(2);function Na(e){e[La]||(e[La]=!0,s.forEach((function(t){Ca.has(t)||Ra(t,!1,e,null),Ra(t,!0,e,null)})))}function Ra(e,t,n,a){var r=4<arguments.length&&void 0!==arguments[4]?arguments[4]:0,o=n;if("selectionchange"===e&&9!==n.nodeType&&(o=n.ownerDocument),null!==a&&!t&&Ca.has(e)){if("scroll"!==e)return;r|=2,o=a}var 
i=or(o),s=e+"__"+(t?"capture":"bubble");i.has(s)||(t&&(r|=4),Pa(o,e,r,t),i.add(s))}function Pa(e,t,n,a){var r=Rt.get(t);switch(void 0===r?2:r){case 0:r=Vt;break;case 1:r=Yt;break;default:r=Qt}n=r.bind(null,t,n,e),r=void 0,!Ue||"touchstart"!==t&&"touchmove"!==t&&"wheel"!==t||(r=!0),a?void 0!==r?e.addEventListener(t,n,{capture:!0,passive:r}):e.addEventListener(t,n,!0):void 0!==r?e.addEventListener(t,n,{passive:r}):e.addEventListener(t,n,!1)}function Ia(e,t,n,a,r){var o=a;if(0==(1&t)&&0==(2&t)&&null!==a)e:for(;;){if(null===a)return;var i=a.tag;if(3===i||4===i){var s=a.stateNode.containerInfo;if(s===r||8===s.nodeType&&s.parentNode===r)break;if(4===i)for(i=a.return;null!==i;){var l=i.tag;if((3===l||4===l)&&((l=i.stateNode.containerInfo)===r||8===l.nodeType&&l.parentNode===r))return;i=i.return}for(;null!==s;){if(null===(i=tr(s)))return;if(5===(l=i.tag)||6===l){a=o=i;continue e}s=s.parentNode}}a=a.return}!function(e,t,n){if(De)return e(t,n);De=!0;try{return Oe(e,t,n)}finally{De=!1,Me()}}((function(){var a=o,r=xe(n),i=[];e:{var s=Nt.get(e);if(void 0!==s){var l=fn,c=e;switch(e){case"keypress":if(0===an(n))break e;case"keydown":case"keyup":l=Gn;break;case"focusin":c="focus",l=bn;break;case"focusout":c="blur",l=bn;break;case"beforeblur":case"afterblur":l=bn;break;case"click":if(2===n.button)break e;case"auxclick":case"dblclick":case"mousedown":case"mousemove":case"mouseup":case"mouseout":case"mouseover":case"contextmenu":l=gn;break;case"drag":case"dragend":case"dragenter":case"dragexit":case"dragleave":case"dragover":case"dragstart":case"drop":l=_n;break;case"touchcancel":case"touchend":case"touchmove":case"touchstart":l=Nn;break;case Tt:case At:case Gt:l=kn;break;case Lt:l=Rn;break;case"scroll":l=mn;break;case"wheel":l=In;break;case"copy":case"cut":case"paste":l=yn;break;case"gotpointercapture":case"lostpointercapture":case"pointercancel":case"pointerdown":case"pointermove":case"pointerout":case"pointerover":case"pointerup":l=Ln}var 
u=0!=(4&t),d=!u&&"scroll"===e,f=u?null!==s?s+"Capture":null:s;u=[];for(var p,m=a;null!==m;){var h=(p=m).stateNode;if(5===p.tag&&null!==h&&(p=h,null!==f&&(null!=(h=Fe(m,f))&&u.push(Oa(m,h,p)))),d)break;m=m.return}0<u.length&&(s=new l(s,c,null,n,r),i.push({event:s,listeners:u}))}}if(0==(7&t)){if(l="mouseout"===e||"pointerout"===e,(!(s="mouseover"===e||"pointerover"===e)||0!=(16&t)||!(c=n.relatedTarget||n.fromElement)||!tr(c)&&!c[Ja])&&(l||s)&&(s=r.window===r?r:(s=r.ownerDocument)?s.defaultView||s.parentWindow:window,l?(l=a,null!==(c=(c=n.relatedTarget||n.toElement)?tr(c):null)&&(c!==(d=Ve(c))||5!==c.tag&&6!==c.tag)&&(c=null)):(l=null,c=a),l!==c)){if(u=gn,h="onMouseLeave",f="onMouseEnter",m="mouse","pointerout"!==e&&"pointerover"!==e||(u=Ln,h="onPointerLeave",f="onPointerEnter",m="pointer"),d=null==l?s:ar(l),p=null==c?s:ar(c),(s=new u(h,m+"leave",l,n,r)).target=d,s.relatedTarget=p,h=null,tr(r)===a&&((u=new u(f,m+"enter",c,n,r)).target=p,u.relatedTarget=d,h=u),d=h,l&&c)e:{for(f=c,m=0,p=u=l;p;p=Da(p))m++;for(p=0,h=f;h;h=Da(h))p++;for(;0<m-p;)u=Da(u),m--;for(;0<p-m;)f=Da(f),p--;for(;m--;){if(u===f||null!==f&&u===f.alternate)break e;u=Da(u),f=Da(f)}u=null}else u=null;null!==l&&Ma(i,s,l,u,!1),null!==c&&null!==d&&Ma(i,d,c,u,!0)}if("select"===(l=(s=a?ar(a):window).nodeName&&s.nodeName.toLowerCase())||"input"===l&&"file"===s.type)var g=Xn;else if(qn(s))if(Jn)g=la;else{g=ia;var _=oa}else(l=s.nodeName)&&"input"===l.toLowerCase()&&("checkbox"===s.type||"radio"===s.type)&&(g=sa);switch(g&&(g=g(e,a))?Zn(i,g,n,r):(_&&_(e,s,a),"focusout"===e&&(_=s._wrapperState)&&_.controlled&&"number"===s.type&&re(s,"number",s.value)),_=a?ar(a):window,e){case"focusin":(qn(_)||"true"===_.contentEditable)&&(ba=_,ka=a,va=null);break;case"focusout":va=ka=ba=null;break;case"mousedown":ya=!0;break;case"contextmenu":case"mouseup":case"dragend":ya=!1,wa(i,n,r);break;case"selectionchange":if(_a)break;case"keydown":case"keyup":wa(i,n,r)}var b;if(Bn)e:{switch(e){case"compositionstart":var 
k="onCompositionStart";break e;case"compositionend":k="onCompositionEnd";break e;case"compositionupdate":k="onCompositionUpdate";break e}k=void 0}else $n?zn(e,n)&&(k="onCompositionEnd"):"keydown"===e&&229===n.keyCode&&(k="onCompositionStart");k&&(Fn&&"ko"!==n.locale&&($n||"onCompositionStart"!==k?"onCompositionEnd"===k&&$n&&(b=nn()):(en="value"in(Jt=r)?Jt.value:Jt.textContent,$n=!0)),0<(_=Ba(a,k)).length&&(k=new wn(k,e,null,n,r),i.push({event:k,listeners:_}),b?k.data=b:null!==(b=Kn(n))&&(k.data=b))),(b=Mn?function(e,t){switch(e){case"compositionend":return Kn(t);case"keypress":return 32!==t.which?null:(jn=!0,Un);case"textInput":return(e=t.data)===Un&&jn?null:e;default:return null}}(e,n):function(e,t){if($n)return"compositionend"===e||!Bn&&zn(e,t)?(e=nn(),tn=en=Jt=null,$n=!1,e):null;switch(e){case"paste":default:return null;case"keypress":if(!(t.ctrlKey||t.altKey||t.metaKey)||t.ctrlKey&&t.altKey){if(t.char&&1<t.char.length)return t.char;if(t.which)return String.fromCharCode(t.which)}return null;case"compositionend":return Fn&&"ko"!==t.locale?null:t.data}}(e,n))&&(0<(a=Ba(a,"onBeforeInput")).length&&(r=new wn("onBeforeInput","beforeinput",null,n,r),i.push({event:r,listeners:a}),r.data=b))}Aa(i,t)}))}function Oa(e,t,n){return{instance:e,listener:t,currentTarget:n}}function Ba(e,t){for(var n=t+"Capture",a=[];null!==e;){var r=e,o=r.stateNode;5===r.tag&&null!==o&&(r=o,null!=(o=Fe(e,n))&&a.unshift(Oa(e,o,r)),null!=(o=Fe(e,t))&&a.push(Oa(e,o,r))),e=e.return}return a}function Da(e){if(null===e)return null;do{e=e.return}while(e&&5!==e.tag);return e||null}function Ma(e,t,n,a,r){for(var o=t._reactName,i=[];null!==n&&n!==a;){var s=n,l=s.alternate,c=s.stateNode;if(null!==l&&l===a)break;5===s.tag&&null!==c&&(s=c,r?null!=(l=Fe(n,o))&&i.unshift(Oa(n,l,s)):r||null!=(l=Fe(n,o))&&i.push(Oa(n,l,s))),n=n.return}0!==i.length&&e.push({event:t,listeners:i})}function Fa(){}var Ua=null,ja=null;function 
za(e,t){switch(e){case"button":case"input":case"select":case"textarea":return!!t.autoFocus}return!1}function Ka(e,t){return"textarea"===e||"option"===e||"noscript"===e||"string"==typeof t.children||"number"==typeof t.children||"object"==typeof t.dangerouslySetInnerHTML&&null!==t.dangerouslySetInnerHTML&&null!=t.dangerouslySetInnerHTML.__html}var $a="function"==typeof setTimeout?setTimeout:void 0,Ha="function"==typeof clearTimeout?clearTimeout:void 0;function qa(e){1===e.nodeType?e.textContent="":9===e.nodeType&&(null!=(e=e.body)&&(e.textContent=""))}function Za(e){for(;null!=e;e=e.nextSibling){var t=e.nodeType;if(1===t||3===t)break}return e}function Wa(e){e=e.previousSibling;for(var t=0;e;){if(8===e.nodeType){var n=e.data;if("$"===n||"$!"===n||"$?"===n){if(0===t)return e;t--}else"/$"===n&&t++}e=e.previousSibling}return null}var Va=0;var Ya=Math.random().toString(36).slice(2),Qa="__reactFiber$"+Ya,Xa="__reactProps$"+Ya,Ja="__reactContainer$"+Ya,er="__reactEvents$"+Ya;function tr(e){var t=e[Qa];if(t)return t;for(var n=e.parentNode;n;){if(t=n[Ja]||n[Qa]){if(n=t.alternate,null!==t.child||null!==n&&null!==n.child)for(e=Wa(e);null!==e;){if(n=e[Qa])return n;e=Wa(e)}return t}n=(e=n).parentNode}return null}function nr(e){return!(e=e[Qa]||e[Ja])||5!==e.tag&&6!==e.tag&&13!==e.tag&&3!==e.tag?null:e}function ar(e){if(5===e.tag||6===e.tag)return e.stateNode;throw Error(i(33))}function rr(e){return e[Xa]||null}function or(e){var t=e[er];return void 0===t&&(t=e[er]=new Set),t}var ir=[],sr=-1;function lr(e){return{current:e}}function cr(e){0>sr||(e.current=ir[sr],ir[sr]=null,sr--)}function ur(e,t){sr++,ir[sr]=e.current,e.current=t}var dr={},fr=lr(dr),pr=lr(!1),mr=dr;function hr(e,t){var n=e.type.contextTypes;if(!n)return dr;var a=e.stateNode;if(a&&a.__reactInternalMemoizedUnmaskedChildContext===t)return a.__reactInternalMemoizedMaskedChildContext;var r,o={};for(r in n)o[r]=t[r];return 
a&&((e=e.stateNode).__reactInternalMemoizedUnmaskedChildContext=t,e.__reactInternalMemoizedMaskedChildContext=o),o}function gr(e){return null!=(e=e.childContextTypes)}function _r(){cr(pr),cr(fr)}function br(e,t,n){if(fr.current!==dr)throw Error(i(168));ur(fr,t),ur(pr,n)}function kr(e,t,n){var a=e.stateNode;if(e=t.childContextTypes,"function"!=typeof a.getChildContext)return n;for(var o in a=a.getChildContext())if(!(o in e))throw Error(i(108,Z(t)||"Unknown",o));return r({},n,a)}function vr(e){return e=(e=e.stateNode)&&e.__reactInternalMemoizedMergedChildContext||dr,mr=fr.current,ur(fr,e),ur(pr,pr.current),!0}function yr(e,t,n){var a=e.stateNode;if(!a)throw Error(i(169));n?(e=kr(e,t,mr),a.__reactInternalMemoizedMergedChildContext=e,cr(pr),cr(fr),ur(fr,e)):cr(pr),ur(pr,n)}var wr=null,Sr=null,Er=o.unstable_runWithPriority,xr=o.unstable_scheduleCallback,Cr=o.unstable_cancelCallback,Tr=o.unstable_shouldYield,Ar=o.unstable_requestPaint,Gr=o.unstable_now,Lr=o.unstable_getCurrentPriorityLevel,Nr=o.unstable_ImmediatePriority,Rr=o.unstable_UserBlockingPriority,Pr=o.unstable_NormalPriority,Ir=o.unstable_LowPriority,Or=o.unstable_IdlePriority,Br={},Dr=void 0!==Ar?Ar:function(){},Mr=null,Fr=null,Ur=!1,jr=Gr(),zr=1e4>jr?Gr:function(){return Gr()-jr};function Kr(){switch(Lr()){case Nr:return 99;case Rr:return 98;case Pr:return 97;case Ir:return 96;case Or:return 95;default:throw Error(i(332))}}function $r(e){switch(e){case 99:return Nr;case 98:return Rr;case 97:return Pr;case 96:return Ir;case 95:return Or;default:throw Error(i(332))}}function Hr(e,t){return e=$r(e),Er(e,t)}function qr(e,t,n){return e=$r(e),xr(e,t,n)}function Zr(){if(null!==Fr){var e=Fr;Fr=null,Cr(e)}Wr()}function Wr(){if(!Ur&&null!==Mr){Ur=!0;var e=0;try{var t=Mr;Hr(99,(function(){for(;e<t.length;e++){var n=t[e];do{n=n(!0)}while(null!==n)}})),Mr=null}catch(n){throw null!==Mr&&(Mr=Mr.slice(e+1)),xr(Nr,Zr),n}finally{Ur=!1}}}var Vr=y.ReactCurrentBatchConfig;function Yr(e,t){if(e&&e.defaultProps){for(var n in 
t=r({},t),e=e.defaultProps)void 0===t[n]&&(t[n]=e[n]);return t}return t}var Qr=lr(null),Xr=null,Jr=null,eo=null;function to(){eo=Jr=Xr=null}function no(e){var t=Qr.current;cr(Qr),e.type._context._currentValue=t}function ao(e,t){for(;null!==e;){var n=e.alternate;if((e.childLanes&t)===t){if(null===n||(n.childLanes&t)===t)break;n.childLanes|=t}else e.childLanes|=t,null!==n&&(n.childLanes|=t);e=e.return}}function ro(e,t){Xr=e,eo=Jr=null,null!==(e=e.dependencies)&&null!==e.firstContext&&(0!=(e.lanes&t)&&(Bi=!0),e.firstContext=null)}function oo(e,t){if(eo!==e&&!1!==t&&0!==t)if("number"==typeof t&&1073741823!==t||(eo=e,t=1073741823),t={context:e,observedBits:t,next:null},null===Jr){if(null===Xr)throw Error(i(308));Jr=t,Xr.dependencies={lanes:0,firstContext:t,responders:null}}else Jr=Jr.next=t;return e._currentValue}var io=!1;function so(e){e.updateQueue={baseState:e.memoizedState,firstBaseUpdate:null,lastBaseUpdate:null,shared:{pending:null},effects:null}}function lo(e,t){e=e.updateQueue,t.updateQueue===e&&(t.updateQueue={baseState:e.baseState,firstBaseUpdate:e.firstBaseUpdate,lastBaseUpdate:e.lastBaseUpdate,shared:e.shared,effects:e.effects})}function co(e,t){return{eventTime:e,lane:t,tag:0,payload:null,callback:null,next:null}}function uo(e,t){if(null!==(e=e.updateQueue)){var n=(e=e.shared).pending;null===n?t.next=t:(t.next=n.next,n.next=t),e.pending=t}}function fo(e,t){var n=e.updateQueue,a=e.alternate;if(null!==a&&n===(a=a.updateQueue)){var r=null,o=null;if(null!==(n=n.firstBaseUpdate)){do{var i={eventTime:n.eventTime,lane:n.lane,tag:n.tag,payload:n.payload,callback:n.callback,next:null};null===o?r=o=i:o=o.next=i,n=n.next}while(null!==n);null===o?r=o=t:o=o.next=t}else r=o=t;return n={baseState:a.baseState,firstBaseUpdate:r,lastBaseUpdate:o,shared:a.shared,effects:a.effects},void(e.updateQueue=n)}null===(e=n.lastBaseUpdate)?n.firstBaseUpdate=t:e.next=t,n.lastBaseUpdate=t}function po(e,t,n,a){var o=e.updateQueue;io=!1;var 
i=o.firstBaseUpdate,s=o.lastBaseUpdate,l=o.shared.pending;if(null!==l){o.shared.pending=null;var c=l,u=c.next;c.next=null,null===s?i=u:s.next=u,s=c;var d=e.alternate;if(null!==d){var f=(d=d.updateQueue).lastBaseUpdate;f!==s&&(null===f?d.firstBaseUpdate=u:f.next=u,d.lastBaseUpdate=c)}}if(null!==i){for(f=o.baseState,s=0,d=u=c=null;;){l=i.lane;var p=i.eventTime;if((a&l)===l){null!==d&&(d=d.next={eventTime:p,lane:0,tag:i.tag,payload:i.payload,callback:i.callback,next:null});e:{var m=e,h=i;switch(l=t,p=n,h.tag){case 1:if("function"==typeof(m=h.payload)){f=m.call(p,f,l);break e}f=m;break e;case 3:m.flags=-4097&m.flags|64;case 0:if(null==(l="function"==typeof(m=h.payload)?m.call(p,f,l):m))break e;f=r({},f,l);break e;case 2:io=!0}}null!==i.callback&&(e.flags|=32,null===(l=o.effects)?o.effects=[i]:l.push(i))}else p={eventTime:p,lane:l,tag:i.tag,payload:i.payload,callback:i.callback,next:null},null===d?(u=d=p,c=f):d=d.next=p,s|=l;if(null===(i=i.next)){if(null===(l=o.shared.pending))break;i=l.next,l.next=null,o.lastBaseUpdate=l,o.shared.pending=null}}null===d&&(c=f),o.baseState=c,o.firstBaseUpdate=u,o.lastBaseUpdate=d,js|=s,e.lanes=s,e.memoizedState=f}}function mo(e,t,n){if(e=t.effects,t.effects=null,null!==e)for(t=0;t<e.length;t++){var a=e[t],r=a.callback;if(null!==r){if(a.callback=null,a=n,"function"!=typeof r)throw Error(i(191,r));r.call(a)}}}var ho=(new a.Component).refs;function go(e,t,n,a){n=null==(n=n(a,t=e.memoizedState))?t:r({},t,n),e.memoizedState=n,0===e.lanes&&(e.updateQueue.baseState=n)}var _o={isMounted:function(e){return!!(e=e._reactInternals)&&Ve(e)===e},enqueueSetState:function(e,t,n){e=e._reactInternals;var a=fl(),r=pl(e),o=co(a,r);o.payload=t,null!=n&&(o.callback=n),uo(e,o),ml(e,r,a)},enqueueReplaceState:function(e,t,n){e=e._reactInternals;var a=fl(),r=pl(e),o=co(a,r);o.tag=1,o.payload=t,null!=n&&(o.callback=n),uo(e,o),ml(e,r,a)},enqueueForceUpdate:function(e,t){e=e._reactInternals;var 
n=fl(),a=pl(e),r=co(n,a);r.tag=2,null!=t&&(r.callback=t),uo(e,r),ml(e,a,n)}};function bo(e,t,n,a,r,o,i){return"function"==typeof(e=e.stateNode).shouldComponentUpdate?e.shouldComponentUpdate(a,o,i):!t.prototype||!t.prototype.isPureReactComponent||(!da(n,a)||!da(r,o))}function ko(e,t,n){var a=!1,r=dr,o=t.contextType;return"object"==typeof o&&null!==o?o=oo(o):(r=gr(t)?mr:fr.current,o=(a=null!=(a=t.contextTypes))?hr(e,r):dr),t=new t(n,o),e.memoizedState=null!==t.state&&void 0!==t.state?t.state:null,t.updater=_o,e.stateNode=t,t._reactInternals=e,a&&((e=e.stateNode).__reactInternalMemoizedUnmaskedChildContext=r,e.__reactInternalMemoizedMaskedChildContext=o),t}function vo(e,t,n,a){e=t.state,"function"==typeof t.componentWillReceiveProps&&t.componentWillReceiveProps(n,a),"function"==typeof t.UNSAFE_componentWillReceiveProps&&t.UNSAFE_componentWillReceiveProps(n,a),t.state!==e&&_o.enqueueReplaceState(t,t.state,null)}function yo(e,t,n,a){var r=e.stateNode;r.props=n,r.state=e.memoizedState,r.refs=ho,so(e);var o=t.contextType;"object"==typeof o&&null!==o?r.context=oo(o):(o=gr(t)?mr:fr.current,r.context=hr(e,o)),po(e,n,r,a),r.state=e.memoizedState,"function"==typeof(o=t.getDerivedStateFromProps)&&(go(e,t,o,n),r.state=e.memoizedState),"function"==typeof t.getDerivedStateFromProps||"function"==typeof r.getSnapshotBeforeUpdate||"function"!=typeof r.UNSAFE_componentWillMount&&"function"!=typeof r.componentWillMount||(t=r.state,"function"==typeof r.componentWillMount&&r.componentWillMount(),"function"==typeof r.UNSAFE_componentWillMount&&r.UNSAFE_componentWillMount(),t!==r.state&&_o.enqueueReplaceState(r,r.state,null),po(e,n,r,a),r.state=e.memoizedState),"function"==typeof r.componentDidMount&&(e.flags|=4)}var wo=Array.isArray;function So(e,t,n){if(null!==(e=n.ref)&&"function"!=typeof e&&"object"!=typeof e){if(n._owner){if(n=n._owner){if(1!==n.tag)throw Error(i(309));var a=n.stateNode}if(!a)throw Error(i(147,e));var r=""+e;return null!==t&&null!==t.ref&&"function"==typeof 
t.ref&&t.ref._stringRef===r?t.ref:(t=function(e){var t=a.refs;t===ho&&(t=a.refs={}),null===e?delete t[r]:t[r]=e},t._stringRef=r,t)}if("string"!=typeof e)throw Error(i(284));if(!n._owner)throw Error(i(290,e))}return e}function Eo(e,t){if("textarea"!==e.type)throw Error(i(31,"[object Object]"===Object.prototype.toString.call(t)?"object with keys {"+Object.keys(t).join(", ")+"}":t))}function xo(e){function t(t,n){if(e){var a=t.lastEffect;null!==a?(a.nextEffect=n,t.lastEffect=n):t.firstEffect=t.lastEffect=n,n.nextEffect=null,n.flags=8}}function n(n,a){if(!e)return null;for(;null!==a;)t(n,a),a=a.sibling;return null}function a(e,t){for(e=new Map;null!==t;)null!==t.key?e.set(t.key,t):e.set(t.index,t),t=t.sibling;return e}function r(e,t){return(e=ql(e,t)).index=0,e.sibling=null,e}function o(t,n,a){return t.index=a,e?null!==(a=t.alternate)?(a=a.index)<n?(t.flags=2,n):a:(t.flags=2,n):n}function s(t){return e&&null===t.alternate&&(t.flags=2),t}function l(e,t,n,a){return null===t||6!==t.tag?((t=Yl(n,e.mode,a)).return=e,t):((t=r(t,n)).return=e,t)}function c(e,t,n,a){return null!==t&&t.elementType===n.type?((a=r(t,n.props)).ref=So(e,t,n),a.return=e,a):((a=Zl(n.type,n.key,n.props,null,e.mode,a)).ref=So(e,t,n),a.return=e,a)}function u(e,t,n,a){return null===t||4!==t.tag||t.stateNode.containerInfo!==n.containerInfo||t.stateNode.implementation!==n.implementation?((t=Ql(n,e.mode,a)).return=e,t):((t=r(t,n.children||[])).return=e,t)}function d(e,t,n,a,o){return null===t||7!==t.tag?((t=Wl(n,e.mode,a,o)).return=e,t):((t=r(t,n)).return=e,t)}function f(e,t,n){if("string"==typeof t||"number"==typeof t)return(t=Yl(""+t,e.mode,n)).return=e,t;if("object"==typeof t&&null!==t){switch(t.$$typeof){case w:return(n=Zl(t.type,t.key,t.props,null,e.mode,n)).ref=So(e,null,t),n.return=e,n;case S:return(t=Ql(t,e.mode,n)).return=e,t}if(wo(t)||z(t))return(t=Wl(t,e.mode,n,null)).return=e,t;Eo(e,t)}return null}function p(e,t,n,a){var r=null!==t?t.key:null;if("string"==typeof n||"number"==typeof n)return 
null!==r?null:l(e,t,""+n,a);if("object"==typeof n&&null!==n){switch(n.$$typeof){case w:return n.key===r?n.type===E?d(e,t,n.props.children,a,r):c(e,t,n,a):null;case S:return n.key===r?u(e,t,n,a):null}if(wo(n)||z(n))return null!==r?null:d(e,t,n,a,null);Eo(e,n)}return null}function m(e,t,n,a,r){if("string"==typeof a||"number"==typeof a)return l(t,e=e.get(n)||null,""+a,r);if("object"==typeof a&&null!==a){switch(a.$$typeof){case w:return e=e.get(null===a.key?n:a.key)||null,a.type===E?d(t,e,a.props.children,r,a.key):c(t,e,a,r);case S:return u(t,e=e.get(null===a.key?n:a.key)||null,a,r)}if(wo(a)||z(a))return d(t,e=e.get(n)||null,a,r,null);Eo(t,a)}return null}function h(r,i,s,l){for(var c=null,u=null,d=i,h=i=0,g=null;null!==d&&h<s.length;h++){d.index>h?(g=d,d=null):g=d.sibling;var _=p(r,d,s[h],l);if(null===_){null===d&&(d=g);break}e&&d&&null===_.alternate&&t(r,d),i=o(_,i,h),null===u?c=_:u.sibling=_,u=_,d=g}if(h===s.length)return n(r,d),c;if(null===d){for(;h<s.length;h++)null!==(d=f(r,s[h],l))&&(i=o(d,i,h),null===u?c=d:u.sibling=d,u=d);return c}for(d=a(r,d);h<s.length;h++)null!==(g=m(d,r,h,s[h],l))&&(e&&null!==g.alternate&&d.delete(null===g.key?h:g.key),i=o(g,i,h),null===u?c=g:u.sibling=g,u=g);return e&&d.forEach((function(e){return t(r,e)})),c}function g(r,s,l,c){var u=z(l);if("function"!=typeof u)throw Error(i(150));if(null==(l=u.call(l)))throw Error(i(151));for(var d=u=null,h=s,g=s=0,_=null,b=l.next();null!==h&&!b.done;g++,b=l.next()){h.index>g?(_=h,h=null):_=h.sibling;var k=p(r,h,b.value,c);if(null===k){null===h&&(h=_);break}e&&h&&null===k.alternate&&t(r,h),s=o(k,s,g),null===d?u=k:d.sibling=k,d=k,h=_}if(b.done)return n(r,h),u;if(null===h){for(;!b.done;g++,b=l.next())null!==(b=f(r,b.value,c))&&(s=o(b,s,g),null===d?u=b:d.sibling=b,d=b);return u}for(h=a(r,h);!b.done;g++,b=l.next())null!==(b=m(h,r,g,b.value,c))&&(e&&null!==b.alternate&&h.delete(null===b.key?g:b.key),s=o(b,s,g),null===d?u=b:d.sibling=b,d=b);return e&&h.forEach((function(e){return t(r,e)})),u}return 
function(e,a,o,l){var c="object"==typeof o&&null!==o&&o.type===E&&null===o.key;c&&(o=o.props.children);var u="object"==typeof o&&null!==o;if(u)switch(o.$$typeof){case w:e:{for(u=o.key,c=a;null!==c;){if(c.key===u){if(7===c.tag){if(o.type===E){n(e,c.sibling),(a=r(c,o.props.children)).return=e,e=a;break e}}else if(c.elementType===o.type){n(e,c.sibling),(a=r(c,o.props)).ref=So(e,c,o),a.return=e,e=a;break e}n(e,c);break}t(e,c),c=c.sibling}o.type===E?((a=Wl(o.props.children,e.mode,l,o.key)).return=e,e=a):((l=Zl(o.type,o.key,o.props,null,e.mode,l)).ref=So(e,a,o),l.return=e,e=l)}return s(e);case S:e:{for(c=o.key;null!==a;){if(a.key===c){if(4===a.tag&&a.stateNode.containerInfo===o.containerInfo&&a.stateNode.implementation===o.implementation){n(e,a.sibling),(a=r(a,o.children||[])).return=e,e=a;break e}n(e,a);break}t(e,a),a=a.sibling}(a=Ql(o,e.mode,l)).return=e,e=a}return s(e)}if("string"==typeof o||"number"==typeof o)return o=""+o,null!==a&&6===a.tag?(n(e,a.sibling),(a=r(a,o)).return=e,e=a):(n(e,a),(a=Yl(o,e.mode,l)).return=e,e=a),s(e);if(wo(o))return h(e,a,o,l);if(z(o))return g(e,a,o,l);if(u&&Eo(e,o),void 0===o&&!c)switch(e.tag){case 1:case 22:case 0:case 11:case 15:throw Error(i(152,Z(e.type)||"Component"))}return n(e,a)}}var Co=xo(!0),To=xo(!1),Ao={},Go=lr(Ao),Lo=lr(Ao),No=lr(Ao);function Ro(e){if(e===Ao)throw Error(i(174));return e}function Po(e,t){switch(ur(No,t),ur(Lo,e),ur(Go,Ao),e=t.nodeType){case 9:case 11:t=(t=t.documentElement)?t.namespaceURI:pe(null,"");break;default:t=pe(t=(e=8===e?t.parentNode:t).namespaceURI||null,e=e.tagName)}cr(Go),ur(Go,t)}function Io(){cr(Go),cr(Lo),cr(No)}function Oo(e){Ro(No.current);var t=Ro(Go.current),n=pe(t,e.type);t!==n&&(ur(Lo,e),ur(Go,n))}function Bo(e){Lo.current===e&&(cr(Go),cr(Lo))}var Do=lr(0);function Mo(e){for(var t=e;null!==t;){if(13===t.tag){var n=t.memoizedState;if(null!==n&&(null===(n=n.dehydrated)||"$?"===n.data||"$!"===n.data))return t}else if(19===t.tag&&void 0!==t.memoizedProps.revealOrder){if(0!=(64&t.flags))return 
t}else if(null!==t.child){t.child.return=t,t=t.child;continue}if(t===e)break;for(;null===t.sibling;){if(null===t.return||t.return===e)return null;t=t.return}t.sibling.return=t.return,t=t.sibling}return null}var Fo=null,Uo=null,jo=!1;function zo(e,t){var n=$l(5,null,null,0);n.elementType="DELETED",n.type="DELETED",n.stateNode=t,n.return=e,n.flags=8,null!==e.lastEffect?(e.lastEffect.nextEffect=n,e.lastEffect=n):e.firstEffect=e.lastEffect=n}function Ko(e,t){switch(e.tag){case 5:var n=e.type;return null!==(t=1!==t.nodeType||n.toLowerCase()!==t.nodeName.toLowerCase()?null:t)&&(e.stateNode=t,!0);case 6:return null!==(t=""===e.pendingProps||3!==t.nodeType?null:t)&&(e.stateNode=t,!0);default:return!1}}function $o(e){if(jo){var t=Uo;if(t){var n=t;if(!Ko(e,t)){if(!(t=Za(n.nextSibling))||!Ko(e,t))return e.flags=-1025&e.flags|2,jo=!1,void(Fo=e);zo(Fo,n)}Fo=e,Uo=Za(t.firstChild)}else e.flags=-1025&e.flags|2,jo=!1,Fo=e}}function Ho(e){for(e=e.return;null!==e&&5!==e.tag&&3!==e.tag&&13!==e.tag;)e=e.return;Fo=e}function qo(e){if(e!==Fo)return!1;if(!jo)return Ho(e),jo=!0,!1;var t=e.type;if(5!==e.tag||"head"!==t&&"body"!==t&&!Ka(t,e.memoizedProps))for(t=Uo;t;)zo(e,t),t=Za(t.nextSibling);if(Ho(e),13===e.tag){if(!(e=null!==(e=e.memoizedState)?e.dehydrated:null))throw Error(i(317));e:{for(e=e.nextSibling,t=0;e;){if(8===e.nodeType){var n=e.data;if("/$"===n){if(0===t){Uo=Za(e.nextSibling);break e}t--}else"$"!==n&&"$!"!==n&&"$?"!==n||t++}e=e.nextSibling}Uo=null}}else Uo=Fo?Za(e.stateNode.nextSibling):null;return!0}function Zo(){Uo=Fo=null,jo=!1}var Wo=[];function Vo(){for(var e=0;e<Wo.length;e++)Wo[e]._workInProgressVersionPrimary=null;Wo.length=0}var Yo=y.ReactCurrentDispatcher,Qo=y.ReactCurrentBatchConfig,Xo=0,Jo=null,ei=null,ti=null,ni=!1,ai=!1;function ri(){throw Error(i(321))}function oi(e,t){if(null===t)return!1;for(var n=0;n<t.length&&n<e.length;n++)if(!ca(e[n],t[n]))return!1;return!0}function 
ii(e,t,n,a,r,o){if(Xo=o,Jo=t,t.memoizedState=null,t.updateQueue=null,t.lanes=0,Yo.current=null===e||null===e.memoizedState?Ri:Pi,e=n(a,r),ai){o=0;do{if(ai=!1,!(25>o))throw Error(i(301));o+=1,ti=ei=null,t.updateQueue=null,Yo.current=Ii,e=n(a,r)}while(ai)}if(Yo.current=Ni,t=null!==ei&&null!==ei.next,Xo=0,ti=ei=Jo=null,ni=!1,t)throw Error(i(300));return e}function si(){var e={memoizedState:null,baseState:null,baseQueue:null,queue:null,next:null};return null===ti?Jo.memoizedState=ti=e:ti=ti.next=e,ti}function li(){if(null===ei){var e=Jo.alternate;e=null!==e?e.memoizedState:null}else e=ei.next;var t=null===ti?Jo.memoizedState:ti.next;if(null!==t)ti=t,ei=e;else{if(null===e)throw Error(i(310));e={memoizedState:(ei=e).memoizedState,baseState:ei.baseState,baseQueue:ei.baseQueue,queue:ei.queue,next:null},null===ti?Jo.memoizedState=ti=e:ti=ti.next=e}return ti}function ci(e,t){return"function"==typeof t?t(e):t}function ui(e){var t=li(),n=t.queue;if(null===n)throw Error(i(311));n.lastRenderedReducer=e;var a=ei,r=a.baseQueue,o=n.pending;if(null!==o){if(null!==r){var s=r.next;r.next=o.next,o.next=s}a.baseQueue=r=o,n.pending=null}if(null!==r){r=r.next,a=a.baseState;var l=s=o=null,c=r;do{var u=c.lane;if((Xo&u)===u)null!==l&&(l=l.next={lane:0,action:c.action,eagerReducer:c.eagerReducer,eagerState:c.eagerState,next:null}),a=c.eagerReducer===e?c.eagerState:e(a,c.action);else{var d={lane:u,action:c.action,eagerReducer:c.eagerReducer,eagerState:c.eagerState,next:null};null===l?(s=l=d,o=a):l=l.next=d,Jo.lanes|=u,js|=u}c=c.next}while(null!==c&&c!==r);null===l?o=a:l.next=s,ca(a,t.memoizedState)||(Bi=!0),t.memoizedState=a,t.baseState=o,t.baseQueue=l,n.lastRenderedState=a}return[t.memoizedState,n.dispatch]}function di(e){var t=li(),n=t.queue;if(null===n)throw Error(i(311));n.lastRenderedReducer=e;var a=n.dispatch,r=n.pending,o=t.memoizedState;if(null!==r){n.pending=null;var 
s=r=r.next;do{o=e(o,s.action),s=s.next}while(s!==r);ca(o,t.memoizedState)||(Bi=!0),t.memoizedState=o,null===t.baseQueue&&(t.baseState=o),n.lastRenderedState=o}return[o,a]}function fi(e,t,n){var a=t._getVersion;a=a(t._source);var r=t._workInProgressVersionPrimary;if(null!==r?e=r===a:(e=e.mutableReadLanes,(e=(Xo&e)===e)&&(t._workInProgressVersionPrimary=a,Wo.push(t))),e)return n(t._source);throw Wo.push(t),Error(i(350))}function pi(e,t,n,a){var r=Ps;if(null===r)throw Error(i(349));var o=t._getVersion,s=o(t._source),l=Yo.current,c=l.useState((function(){return fi(r,t,n)})),u=c[1],d=c[0];c=ti;var f=e.memoizedState,p=f.refs,m=p.getSnapshot,h=f.source;f=f.subscribe;var g=Jo;return e.memoizedState={refs:p,source:t,subscribe:a},l.useEffect((function(){p.getSnapshot=n,p.setSnapshot=u;var e=o(t._source);if(!ca(s,e)){e=n(t._source),ca(d,e)||(u(e),e=pl(g),r.mutableReadLanes|=e&r.pendingLanes),e=r.mutableReadLanes,r.entangledLanes|=e;for(var a=r.entanglements,i=e;0<i;){var l=31-Kt(i),c=1<<l;a[l]|=e,i&=~c}}}),[n,t,a]),l.useEffect((function(){return a(t._source,(function(){var e=p.getSnapshot,n=p.setSnapshot;try{n(e(t._source));var a=pl(g);r.mutableReadLanes|=a&r.pendingLanes}catch(o){n((function(){throw o}))}}))}),[t,a]),ca(m,n)&&ca(h,t)&&ca(f,a)||((e={pending:null,dispatch:null,lastRenderedReducer:ci,lastRenderedState:d}).dispatch=u=Li.bind(null,Jo,e),c.queue=e,c.baseQueue=null,d=fi(r,t,n),c.memoizedState=c.baseState=d),d}function mi(e,t,n){return pi(li(),e,t,n)}function hi(e){var t=si();return"function"==typeof e&&(e=e()),t.memoizedState=t.baseState=e,e=(e=t.queue={pending:null,dispatch:null,lastRenderedReducer:ci,lastRenderedState:e}).dispatch=Li.bind(null,Jo,e),[t.memoizedState,e]}function gi(e,t,n,a){return e={tag:e,create:t,destroy:n,deps:a,next:null},null===(t=Jo.updateQueue)?(t={lastEffect:null},Jo.updateQueue=t,t.lastEffect=e.next=e):null===(n=t.lastEffect)?t.lastEffect=e.next=e:(a=n.next,n.next=e,e.next=a,t.lastEffect=e),e}function _i(e){return 
e={current:e},si().memoizedState=e}function bi(){return li().memoizedState}function ki(e,t,n,a){var r=si();Jo.flags|=e,r.memoizedState=gi(1|t,n,void 0,void 0===a?null:a)}function vi(e,t,n,a){var r=li();a=void 0===a?null:a;var o=void 0;if(null!==ei){var i=ei.memoizedState;if(o=i.destroy,null!==a&&oi(a,i.deps))return void gi(t,n,o,a)}Jo.flags|=e,r.memoizedState=gi(1|t,n,o,a)}function yi(e,t){return ki(516,4,e,t)}function wi(e,t){return vi(516,4,e,t)}function Si(e,t){return vi(4,2,e,t)}function Ei(e,t){return"function"==typeof t?(e=e(),t(e),function(){t(null)}):null!=t?(e=e(),t.current=e,function(){t.current=null}):void 0}function xi(e,t,n){return n=null!=n?n.concat([e]):null,vi(4,2,Ei.bind(null,t,e),n)}function Ci(){}function Ti(e,t){var n=li();t=void 0===t?null:t;var a=n.memoizedState;return null!==a&&null!==t&&oi(t,a[1])?a[0]:(n.memoizedState=[e,t],e)}function Ai(e,t){var n=li();t=void 0===t?null:t;var a=n.memoizedState;return null!==a&&null!==t&&oi(t,a[1])?a[0]:(e=e(),n.memoizedState=[e,t],e)}function Gi(e,t){var n=Kr();Hr(98>n?98:n,(function(){e(!0)})),Hr(97<n?97:n,(function(){var n=Qo.transition;Qo.transition=1;try{e(!1),t()}finally{Qo.transition=n}}))}function Li(e,t,n){var a=fl(),r=pl(e),o={lane:r,action:n,eagerReducer:null,eagerState:null,next:null},i=t.pending;if(null===i?o.next=o:(o.next=i.next,i.next=o),t.pending=o,i=e.alternate,e===Jo||null!==i&&i===Jo)ai=ni=!0;else{if(0===e.lanes&&(null===i||0===i.lanes)&&null!==(i=t.lastRenderedReducer))try{var s=t.lastRenderedState,l=i(s,n);if(o.eagerReducer=i,o.eagerState=l,ca(l,s))return}catch(c){}ml(e,r,a)}}var Ni={readContext:oo,useCallback:ri,useContext:ri,useEffect:ri,useImperativeHandle:ri,useLayoutEffect:ri,useMemo:ri,useReducer:ri,useRef:ri,useState:ri,useDebugValue:ri,useDeferredValue:ri,useTransition:ri,useMutableSource:ri,useOpaqueIdentifier:ri,unstable_isNewReconciler:!1},Ri={readContext:oo,useCallback:function(e,t){return si().memoizedState=[e,void 
0===t?null:t],e},useContext:oo,useEffect:yi,useImperativeHandle:function(e,t,n){return n=null!=n?n.concat([e]):null,ki(4,2,Ei.bind(null,t,e),n)},useLayoutEffect:function(e,t){return ki(4,2,e,t)},useMemo:function(e,t){var n=si();return t=void 0===t?null:t,e=e(),n.memoizedState=[e,t],e},useReducer:function(e,t,n){var a=si();return t=void 0!==n?n(t):t,a.memoizedState=a.baseState=t,e=(e=a.queue={pending:null,dispatch:null,lastRenderedReducer:e,lastRenderedState:t}).dispatch=Li.bind(null,Jo,e),[a.memoizedState,e]},useRef:_i,useState:hi,useDebugValue:Ci,useDeferredValue:function(e){var t=hi(e),n=t[0],a=t[1];return yi((function(){var t=Qo.transition;Qo.transition=1;try{a(e)}finally{Qo.transition=t}}),[e]),n},useTransition:function(){var e=hi(!1),t=e[0];return _i(e=Gi.bind(null,e[1])),[e,t]},useMutableSource:function(e,t,n){var a=si();return a.memoizedState={refs:{getSnapshot:t,setSnapshot:null},source:e,subscribe:n},pi(a,e,t,n)},useOpaqueIdentifier:function(){if(jo){var e=!1,t=function(e){return{$$typeof:O,toString:e,valueOf:e}}((function(){throw e||(e=!0,n("r:"+(Va++).toString(36))),Error(i(355))})),n=hi(t)[1];return 0==(2&Jo.mode)&&(Jo.flags|=516,gi(5,(function(){n("r:"+(Va++).toString(36))}),void 0,null)),t}return hi(t="r:"+(Va++).toString(36)),t},unstable_isNewReconciler:!1},Pi={readContext:oo,useCallback:Ti,useContext:oo,useEffect:wi,useImperativeHandle:xi,useLayoutEffect:Si,useMemo:Ai,useReducer:ui,useRef:bi,useState:function(){return ui(ci)},useDebugValue:Ci,useDeferredValue:function(e){var t=ui(ci),n=t[0],a=t[1];return wi((function(){var t=Qo.transition;Qo.transition=1;try{a(e)}finally{Qo.transition=t}}),[e]),n},useTransition:function(){var e=ui(ci)[0];return[bi().current,e]},useMutableSource:mi,useOpaqueIdentifier:function(){return ui(ci)[0]},unstable_isNewReconciler:!1},Ii={readContext:oo,useCallback:Ti,useContext:oo,useEffect:wi,useImperativeHandle:xi,useLayoutEffect:Si,useMemo:Ai,useReducer:di,useRef:bi,useState:function(){return 
di(ci)},useDebugValue:Ci,useDeferredValue:function(e){var t=di(ci),n=t[0],a=t[1];return wi((function(){var t=Qo.transition;Qo.transition=1;try{a(e)}finally{Qo.transition=t}}),[e]),n},useTransition:function(){var e=di(ci)[0];return[bi().current,e]},useMutableSource:mi,useOpaqueIdentifier:function(){return di(ci)[0]},unstable_isNewReconciler:!1},Oi=y.ReactCurrentOwner,Bi=!1;function Di(e,t,n,a){t.child=null===e?To(t,null,n,a):Co(t,e.child,n,a)}function Mi(e,t,n,a,r){n=n.render;var o=t.ref;return ro(t,r),a=ii(e,t,n,a,o,r),null===e||Bi?(t.flags|=1,Di(e,t,a,r),t.child):(t.updateQueue=e.updateQueue,t.flags&=-517,e.lanes&=~r,os(e,t,r))}function Fi(e,t,n,a,r,o){if(null===e){var i=n.type;return"function"!=typeof i||Hl(i)||void 0!==i.defaultProps||null!==n.compare||void 0!==n.defaultProps?((e=Zl(n.type,null,a,t,t.mode,o)).ref=t.ref,e.return=t,t.child=e):(t.tag=15,t.type=i,Ui(e,t,i,a,r,o))}return i=e.child,0==(r&o)&&(r=i.memoizedProps,(n=null!==(n=n.compare)?n:da)(r,a)&&e.ref===t.ref)?os(e,t,o):(t.flags|=1,(e=ql(i,a)).ref=t.ref,e.return=t,t.child=e)}function Ui(e,t,n,a,r,o){if(null!==e&&da(e.memoizedProps,a)&&e.ref===t.ref){if(Bi=!1,0==(o&r))return t.lanes=e.lanes,os(e,t,o);0!=(16384&e.flags)&&(Bi=!0)}return Ki(e,t,n,a,o)}function ji(e,t,n){var a=t.pendingProps,r=a.children,o=null!==e?e.memoizedState:null;if("hidden"===a.mode||"unstable-defer-without-hiding"===a.mode)if(0==(4&t.mode))t.memoizedState={baseLanes:0},wl(t,n);else{if(0==(1073741824&n))return e=null!==o?o.baseLanes|n:n,t.lanes=t.childLanes=1073741824,t.memoizedState={baseLanes:e},wl(t,e),null;t.memoizedState={baseLanes:0},wl(t,null!==o?o.baseLanes:n)}else null!==o?(a=o.baseLanes|n,t.memoizedState=null):a=n,wl(t,a);return Di(e,t,r,n),t.child}function zi(e,t){var n=t.ref;(null===e&&null!==n||null!==e&&e.ref!==n)&&(t.flags|=128)}function Ki(e,t,n,a,r){var o=gr(n)?mr:fr.current;return 
o=hr(t,o),ro(t,r),n=ii(e,t,n,a,o,r),null===e||Bi?(t.flags|=1,Di(e,t,n,r),t.child):(t.updateQueue=e.updateQueue,t.flags&=-517,e.lanes&=~r,os(e,t,r))}function $i(e,t,n,a,r){if(gr(n)){var o=!0;vr(t)}else o=!1;if(ro(t,r),null===t.stateNode)null!==e&&(e.alternate=null,t.alternate=null,t.flags|=2),ko(t,n,a),yo(t,n,a,r),a=!0;else if(null===e){var i=t.stateNode,s=t.memoizedProps;i.props=s;var l=i.context,c=n.contextType;"object"==typeof c&&null!==c?c=oo(c):c=hr(t,c=gr(n)?mr:fr.current);var u=n.getDerivedStateFromProps,d="function"==typeof u||"function"==typeof i.getSnapshotBeforeUpdate;d||"function"!=typeof i.UNSAFE_componentWillReceiveProps&&"function"!=typeof i.componentWillReceiveProps||(s!==a||l!==c)&&vo(t,i,a,c),io=!1;var f=t.memoizedState;i.state=f,po(t,a,i,r),l=t.memoizedState,s!==a||f!==l||pr.current||io?("function"==typeof u&&(go(t,n,u,a),l=t.memoizedState),(s=io||bo(t,n,s,a,f,l,c))?(d||"function"!=typeof i.UNSAFE_componentWillMount&&"function"!=typeof i.componentWillMount||("function"==typeof i.componentWillMount&&i.componentWillMount(),"function"==typeof i.UNSAFE_componentWillMount&&i.UNSAFE_componentWillMount()),"function"==typeof i.componentDidMount&&(t.flags|=4)):("function"==typeof i.componentDidMount&&(t.flags|=4),t.memoizedProps=a,t.memoizedState=l),i.props=a,i.state=l,i.context=c,a=s):("function"==typeof i.componentDidMount&&(t.flags|=4),a=!1)}else{i=t.stateNode,lo(e,t),s=t.memoizedProps,c=t.type===t.elementType?s:Yr(t.type,s),i.props=c,d=t.pendingProps,f=i.context,"object"==typeof(l=n.contextType)&&null!==l?l=oo(l):l=hr(t,l=gr(n)?mr:fr.current);var p=n.getDerivedStateFromProps;(u="function"==typeof p||"function"==typeof i.getSnapshotBeforeUpdate)||"function"!=typeof i.UNSAFE_componentWillReceiveProps&&"function"!=typeof i.componentWillReceiveProps||(s!==d||f!==l)&&vo(t,i,a,l),io=!1,f=t.memoizedState,i.state=f,po(t,a,i,r);var m=t.memoizedState;s!==d||f!==m||pr.current||io?("function"==typeof 
p&&(go(t,n,p,a),m=t.memoizedState),(c=io||bo(t,n,c,a,f,m,l))?(u||"function"!=typeof i.UNSAFE_componentWillUpdate&&"function"!=typeof i.componentWillUpdate||("function"==typeof i.componentWillUpdate&&i.componentWillUpdate(a,m,l),"function"==typeof i.UNSAFE_componentWillUpdate&&i.UNSAFE_componentWillUpdate(a,m,l)),"function"==typeof i.componentDidUpdate&&(t.flags|=4),"function"==typeof i.getSnapshotBeforeUpdate&&(t.flags|=256)):("function"!=typeof i.componentDidUpdate||s===e.memoizedProps&&f===e.memoizedState||(t.flags|=4),"function"!=typeof i.getSnapshotBeforeUpdate||s===e.memoizedProps&&f===e.memoizedState||(t.flags|=256),t.memoizedProps=a,t.memoizedState=m),i.props=a,i.state=m,i.context=l,a=c):("function"!=typeof i.componentDidUpdate||s===e.memoizedProps&&f===e.memoizedState||(t.flags|=4),"function"!=typeof i.getSnapshotBeforeUpdate||s===e.memoizedProps&&f===e.memoizedState||(t.flags|=256),a=!1)}return Hi(e,t,n,a,o,r)}function Hi(e,t,n,a,r,o){zi(e,t);var i=0!=(64&t.flags);if(!a&&!i)return r&&yr(t,n,!1),os(e,t,o);a=t.stateNode,Oi.current=t;var s=i&&"function"!=typeof n.getDerivedStateFromError?null:a.render();return t.flags|=1,null!==e&&i?(t.child=Co(t,e.child,null,o),t.child=Co(t,null,s,o)):Di(e,t,s,o),t.memoizedState=a.state,r&&yr(t,n,!0),t.child}function qi(e){var t=e.stateNode;t.pendingContext?br(0,t.pendingContext,t.pendingContext!==t.context):t.context&&br(0,t.context,!1),Po(e,t.containerInfo)}var Zi,Wi,Vi,Yi,Qi={dehydrated:null,retryLane:0};function Xi(e,t,n){var a,r=t.pendingProps,o=Do.current,i=!1;return(a=0!=(64&t.flags))||(a=(null===e||null!==e.memoizedState)&&0!=(2&o)),a?(i=!0,t.flags&=-65):null!==e&&null===e.memoizedState||void 0===r.fallback||!0===r.unstable_avoidThisFallback||(o|=1),ur(Do,1&o),null===e?(void 0!==r.fallback&&$o(t),e=r.children,o=r.fallback,i?(e=Ji(t,e,o,n),t.child.memoizedState={baseLanes:n},t.memoizedState=Qi,e):"number"==typeof 
r.unstable_expectedLoadTime?(e=Ji(t,e,o,n),t.child.memoizedState={baseLanes:n},t.memoizedState=Qi,t.lanes=33554432,e):((n=Vl({mode:"visible",children:e},t.mode,n,null)).return=t,t.child=n)):(e.memoizedState,i?(r=ts(e,t,r.children,r.fallback,n),i=t.child,o=e.child.memoizedState,i.memoizedState=null===o?{baseLanes:n}:{baseLanes:o.baseLanes|n},i.childLanes=e.childLanes&~n,t.memoizedState=Qi,r):(n=es(e,t,r.children,n),t.memoizedState=null,n))}function Ji(e,t,n,a){var r=e.mode,o=e.child;return t={mode:"hidden",children:t},0==(2&r)&&null!==o?(o.childLanes=0,o.pendingProps=t):o=Vl(t,r,0,null),n=Wl(n,r,a,null),o.return=e,n.return=e,o.sibling=n,e.child=o,n}function es(e,t,n,a){var r=e.child;return e=r.sibling,n=ql(r,{mode:"visible",children:n}),0==(2&t.mode)&&(n.lanes=a),n.return=t,n.sibling=null,null!==e&&(e.nextEffect=null,e.flags=8,t.firstEffect=t.lastEffect=e),t.child=n}function ts(e,t,n,a,r){var o=t.mode,i=e.child;e=i.sibling;var s={mode:"hidden",children:n};return 0==(2&o)&&t.child!==i?((n=t.child).childLanes=0,n.pendingProps=s,null!==(i=n.lastEffect)?(t.firstEffect=n.firstEffect,t.lastEffect=i,i.nextEffect=null):t.firstEffect=t.lastEffect=null):n=ql(i,s),null!==e?a=ql(e,a):(a=Wl(a,o,r,null)).flags|=2,a.return=t,n.return=t,n.sibling=a,t.child=n,a}function ns(e,t){e.lanes|=t;var n=e.alternate;null!==n&&(n.lanes|=t),ao(e.return,t)}function as(e,t,n,a,r,o){var i=e.memoizedState;null===i?e.memoizedState={isBackwards:t,rendering:null,renderingStartTime:0,last:a,tail:n,tailMode:r,lastEffect:o}:(i.isBackwards=t,i.rendering=null,i.renderingStartTime=0,i.last=a,i.tail=n,i.tailMode=r,i.lastEffect=o)}function rs(e,t,n){var a=t.pendingProps,r=a.revealOrder,o=a.tail;if(Di(e,t,a.children,n),0!=(2&(a=Do.current)))a=1&a|2,t.flags|=64;else{if(null!==e&&0!=(64&e.flags))e:for(e=t.child;null!==e;){if(13===e.tag)null!==e.memoizedState&&ns(e,n);else if(19===e.tag)ns(e,n);else if(null!==e.child){e.child.return=e,e=e.child;continue}if(e===t)break 
e;for(;null===e.sibling;){if(null===e.return||e.return===t)break e;e=e.return}e.sibling.return=e.return,e=e.sibling}a&=1}if(ur(Do,a),0==(2&t.mode))t.memoizedState=null;else switch(r){case"forwards":for(n=t.child,r=null;null!==n;)null!==(e=n.alternate)&&null===Mo(e)&&(r=n),n=n.sibling;null===(n=r)?(r=t.child,t.child=null):(r=n.sibling,n.sibling=null),as(t,!1,r,n,o,t.lastEffect);break;case"backwards":for(n=null,r=t.child,t.child=null;null!==r;){if(null!==(e=r.alternate)&&null===Mo(e)){t.child=r;break}e=r.sibling,r.sibling=n,n=r,r=e}as(t,!0,n,null,o,t.lastEffect);break;case"together":as(t,!1,null,null,void 0,t.lastEffect);break;default:t.memoizedState=null}return t.child}function os(e,t,n){if(null!==e&&(t.dependencies=e.dependencies),js|=t.lanes,0!=(n&t.childLanes)){if(null!==e&&t.child!==e.child)throw Error(i(153));if(null!==t.child){for(n=ql(e=t.child,e.pendingProps),t.child=n,n.return=t;null!==e.sibling;)e=e.sibling,(n=n.sibling=ql(e,e.pendingProps)).return=t;n.sibling=null}return t.child}return null}function is(e,t){if(!jo)switch(e.tailMode){case"hidden":t=e.tail;for(var n=null;null!==t;)null!==t.alternate&&(n=t),t=t.sibling;null===n?e.tail=null:n.sibling=null;break;case"collapsed":n=e.tail;for(var a=null;null!==n;)null!==n.alternate&&(a=n),n=n.sibling;null===a?t||null===e.tail?e.tail=null:e.tail.sibling=null:a.sibling=null}}function ss(e,t,n){var a=t.pendingProps;switch(t.tag){case 2:case 16:case 15:case 0:case 11:case 7:case 8:case 12:case 9:case 14:return null;case 1:case 17:return gr(t.type)&&_r(),null;case 3:return Io(),cr(pr),cr(fr),Vo(),(a=t.stateNode).pendingContext&&(a.context=a.pendingContext,a.pendingContext=null),null!==e&&null!==e.child||(qo(t)?t.flags|=4:a.hydrate||(t.flags|=256)),Wi(t),null;case 5:Bo(t);var o=Ro(No.current);if(n=t.type,null!==e&&null!=t.stateNode)Vi(e,t,n,a,o),e.ref!==t.ref&&(t.flags|=128);else{if(!a){if(null===t.stateNode)throw Error(i(166));return null}if(e=Ro(Go.current),qo(t)){a=t.stateNode,n=t.type;var 
s=t.memoizedProps;switch(a[Qa]=t,a[Xa]=s,n){case"dialog":Ga("cancel",a),Ga("close",a);break;case"iframe":case"object":case"embed":Ga("load",a);break;case"video":case"audio":for(e=0;e<xa.length;e++)Ga(xa[e],a);break;case"source":Ga("error",a);break;case"img":case"image":case"link":Ga("error",a),Ga("load",a);break;case"details":Ga("toggle",a);break;case"input":ee(a,s),Ga("invalid",a);break;case"select":a._wrapperState={wasMultiple:!!s.multiple},Ga("invalid",a);break;case"textarea":le(a,s),Ga("invalid",a)}for(var c in Se(n,s),e=null,s)s.hasOwnProperty(c)&&(o=s[c],"children"===c?"string"==typeof o?a.textContent!==o&&(e=["children",o]):"number"==typeof o&&a.textContent!==""+o&&(e=["children",""+o]):l.hasOwnProperty(c)&&null!=o&&"onScroll"===c&&Ga("scroll",a));switch(n){case"input":Y(a),ae(a,s,!0);break;case"textarea":Y(a),ue(a);break;case"select":case"option":break;default:"function"==typeof s.onClick&&(a.onclick=Fa)}a=e,t.updateQueue=a,null!==a&&(t.flags|=4)}else{switch(c=9===o.nodeType?o:o.ownerDocument,e===de.html&&(e=fe(n)),e===de.html?"script"===n?((e=c.createElement("div")).innerHTML="<script><\/script>",e=e.removeChild(e.firstChild)):"string"==typeof a.is?e=c.createElement(n,{is:a.is}):(e=c.createElement(n),"select"===n&&(c=e,a.multiple?c.multiple=!0:a.size&&(c.size=a.size))):e=c.createElementNS(e,n),e[Qa]=t,e[Xa]=a,Zi(e,t,!1,!1),t.stateNode=e,c=Ee(n,a),n){case"dialog":Ga("cancel",e),Ga("close",e),o=a;break;case"iframe":case"object":case"embed":Ga("load",e),o=a;break;case"video":case"audio":for(o=0;o<xa.length;o++)Ga(xa[o],e);o=a;break;case"source":Ga("error",e),o=a;break;case"img":case"image":case"link":Ga("error",e),Ga("load",e),o=a;break;case"details":Ga("toggle",e),o=a;break;case"input":ee(e,a),o=J(e,a),Ga("invalid",e);break;case"option":o=oe(e,a);break;case"select":e._wrapperState={wasMultiple:!!a.multiple},o=r({},a,{value:void 0}),Ga("invalid",e);break;case"textarea":le(e,a),o=se(e,a),Ga("invalid",e);break;default:o=a}Se(n,o);var u=o;for(s in 
u)if(u.hasOwnProperty(s)){var d=u[s];"style"===s?ye(e,d):"dangerouslySetInnerHTML"===s?null!=(d=d?d.__html:void 0)&&ge(e,d):"children"===s?"string"==typeof d?("textarea"!==n||""!==d)&&_e(e,d):"number"==typeof d&&_e(e,""+d):"suppressContentEditableWarning"!==s&&"suppressHydrationWarning"!==s&&"autoFocus"!==s&&(l.hasOwnProperty(s)?null!=d&&"onScroll"===s&&Ga("scroll",e):null!=d&&v(e,s,d,c))}switch(n){case"input":Y(e),ae(e,a,!1);break;case"textarea":Y(e),ue(e);break;case"option":null!=a.value&&e.setAttribute("value",""+W(a.value));break;case"select":e.multiple=!!a.multiple,null!=(s=a.value)?ie(e,!!a.multiple,s,!1):null!=a.defaultValue&&ie(e,!!a.multiple,a.defaultValue,!0);break;default:"function"==typeof o.onClick&&(e.onclick=Fa)}za(n,a)&&(t.flags|=4)}null!==t.ref&&(t.flags|=128)}return null;case 6:if(e&&null!=t.stateNode)Yi(e,t,e.memoizedProps,a);else{if("string"!=typeof a&&null===t.stateNode)throw Error(i(166));n=Ro(No.current),Ro(Go.current),qo(t)?(a=t.stateNode,n=t.memoizedProps,a[Qa]=t,a.nodeValue!==n&&(t.flags|=4)):((a=(9===n.nodeType?n:n.ownerDocument).createTextNode(a))[Qa]=t,t.stateNode=a)}return null;case 13:return cr(Do),a=t.memoizedState,0!=(64&t.flags)?(t.lanes=n,t):(a=null!==a,n=!1,null===e?void 0!==t.memoizedProps.fallback&&qo(t):n=null!==e.memoizedState,a&&!n&&0!=(2&t.mode)&&(null===e&&!0!==t.memoizedProps.unstable_avoidThisFallback||0!=(1&Do.current)?0===Ms&&(Ms=3):(0!==Ms&&3!==Ms||(Ms=4),null===Ps||0==(134217727&js)&&0==(134217727&zs)||bl(Ps,Os))),(a||n)&&(t.flags|=4),null);case 4:return Io(),Wi(t),null===e&&Na(t.stateNode.containerInfo),null;case 10:return no(t),null;case 19:if(cr(Do),null===(a=t.memoizedState))return 
null;if(s=0!=(64&t.flags),null===(c=a.rendering))if(s)is(a,!1);else{if(0!==Ms||null!==e&&0!=(64&e.flags))for(e=t.child;null!==e;){if(null!==(c=Mo(e))){for(t.flags|=64,is(a,!1),null!==(s=c.updateQueue)&&(t.updateQueue=s,t.flags|=4),null===a.lastEffect&&(t.firstEffect=null),t.lastEffect=a.lastEffect,a=n,n=t.child;null!==n;)e=a,(s=n).flags&=2,s.nextEffect=null,s.firstEffect=null,s.lastEffect=null,null===(c=s.alternate)?(s.childLanes=0,s.lanes=e,s.child=null,s.memoizedProps=null,s.memoizedState=null,s.updateQueue=null,s.dependencies=null,s.stateNode=null):(s.childLanes=c.childLanes,s.lanes=c.lanes,s.child=c.child,s.memoizedProps=c.memoizedProps,s.memoizedState=c.memoizedState,s.updateQueue=c.updateQueue,s.type=c.type,e=c.dependencies,s.dependencies=null===e?null:{lanes:e.lanes,firstContext:e.firstContext}),n=n.sibling;return ur(Do,1&Do.current|2),t.child}e=e.sibling}null!==a.tail&&zr()>qs&&(t.flags|=64,s=!0,is(a,!1),t.lanes=33554432)}else{if(!s)if(null!==(e=Mo(c))){if(t.flags|=64,s=!0,null!==(n=e.updateQueue)&&(t.updateQueue=n,t.flags|=4),is(a,!0),null===a.tail&&"hidden"===a.tailMode&&!c.alternate&&!jo)return null!==(t=t.lastEffect=a.lastEffect)&&(t.nextEffect=null),null}else 2*zr()-a.renderingStartTime>qs&&1073741824!==n&&(t.flags|=64,s=!0,is(a,!1),t.lanes=33554432);a.isBackwards?(c.sibling=t.child,t.child=c):(null!==(n=a.last)?n.sibling=c:t.child=c,a.last=c)}return null!==a.tail?(n=a.tail,a.rendering=n,a.tail=n.sibling,a.lastEffect=t.lastEffect,a.renderingStartTime=zr(),n.sibling=null,t=Do.current,ur(Do,s?1&t|2:1&t),n):null;case 23:case 24:return Sl(),null!==e&&null!==e.memoizedState!=(null!==t.memoizedState)&&"unstable-defer-without-hiding"!==a.mode&&(t.flags|=4),null}throw Error(i(156,t.tag))}function ls(e){switch(e.tag){case 1:gr(e.type)&&_r();var t=e.flags;return 4096&t?(e.flags=-4097&t|64,e):null;case 3:if(Io(),cr(pr),cr(fr),Vo(),0!=(64&(t=e.flags)))throw Error(i(285));return e.flags=-4097&t|64,e;case 5:return Bo(e),null;case 13:return 
cr(Do),4096&(t=e.flags)?(e.flags=-4097&t|64,e):null;case 19:return cr(Do),null;case 4:return Io(),null;case 10:return no(e),null;case 23:case 24:return Sl(),null;default:return null}}function cs(e,t){try{var n="",a=t;do{n+=q(a),a=a.return}while(a);var r=n}catch(o){r="\nError generating stack: "+o.message+"\n"+o.stack}return{value:e,source:t,stack:r}}function us(e,t){try{console.error(t.value)}catch(n){setTimeout((function(){throw n}))}}Zi=function(e,t){for(var n=t.child;null!==n;){if(5===n.tag||6===n.tag)e.appendChild(n.stateNode);else if(4!==n.tag&&null!==n.child){n.child.return=n,n=n.child;continue}if(n===t)break;for(;null===n.sibling;){if(null===n.return||n.return===t)return;n=n.return}n.sibling.return=n.return,n=n.sibling}},Wi=function(){},Vi=function(e,t,n,a){var o=e.memoizedProps;if(o!==a){e=t.stateNode,Ro(Go.current);var i,s=null;switch(n){case"input":o=J(e,o),a=J(e,a),s=[];break;case"option":o=oe(e,o),a=oe(e,a),s=[];break;case"select":o=r({},o,{value:void 0}),a=r({},a,{value:void 0}),s=[];break;case"textarea":o=se(e,o),a=se(e,a),s=[];break;default:"function"!=typeof o.onClick&&"function"==typeof a.onClick&&(e.onclick=Fa)}for(d in Se(n,a),n=null,o)if(!a.hasOwnProperty(d)&&o.hasOwnProperty(d)&&null!=o[d])if("style"===d){var c=o[d];for(i in c)c.hasOwnProperty(i)&&(n||(n={}),n[i]="")}else"dangerouslySetInnerHTML"!==d&&"children"!==d&&"suppressContentEditableWarning"!==d&&"suppressHydrationWarning"!==d&&"autoFocus"!==d&&(l.hasOwnProperty(d)?s||(s=[]):(s=s||[]).push(d,null));for(d in a){var u=a[d];if(c=null!=o?o[d]:void 0,a.hasOwnProperty(d)&&u!==c&&(null!=u||null!=c))if("style"===d)if(c){for(i in c)!c.hasOwnProperty(i)||u&&u.hasOwnProperty(i)||(n||(n={}),n[i]="");for(i in u)u.hasOwnProperty(i)&&c[i]!==u[i]&&(n||(n={}),n[i]=u[i])}else n||(s||(s=[]),s.push(d,n)),n=u;else"dangerouslySetInnerHTML"===d?(u=u?u.__html:void 0,c=c?c.__html:void 0,null!=u&&c!==u&&(s=s||[]).push(d,u)):"children"===d?"string"!=typeof u&&"number"!=typeof 
u||(s=s||[]).push(d,""+u):"suppressContentEditableWarning"!==d&&"suppressHydrationWarning"!==d&&(l.hasOwnProperty(d)?(null!=u&&"onScroll"===d&&Ga("scroll",e),s||c===u||(s=[])):"object"==typeof u&&null!==u&&u.$$typeof===O?u.toString():(s=s||[]).push(d,u))}n&&(s=s||[]).push("style",n);var d=s;(t.updateQueue=d)&&(t.flags|=4)}},Yi=function(e,t,n,a){n!==a&&(t.flags|=4)};var ds="function"==typeof WeakMap?WeakMap:Map;function fs(e,t,n){(n=co(-1,n)).tag=3,n.payload={element:null};var a=t.value;return n.callback=function(){Ys||(Ys=!0,Qs=a),us(0,t)},n}function ps(e,t,n){(n=co(-1,n)).tag=3;var a=e.type.getDerivedStateFromError;if("function"==typeof a){var r=t.value;n.payload=function(){return us(0,t),a(r)}}var o=e.stateNode;return null!==o&&"function"==typeof o.componentDidCatch&&(n.callback=function(){"function"!=typeof a&&(null===Xs?Xs=new Set([this]):Xs.add(this),us(0,t));var e=t.stack;this.componentDidCatch(t.value,{componentStack:null!==e?e:""})}),n}var ms="function"==typeof WeakSet?WeakSet:Set;function hs(e){var t=e.ref;if(null!==t)if("function"==typeof t)try{t(null)}catch(n){Ul(e,n)}else t.current=null}function gs(e,t){switch(t.tag){case 0:case 11:case 15:case 22:case 5:case 6:case 4:case 17:return;case 1:if(256&t.flags&&null!==e){var n=e.memoizedProps,a=e.memoizedState;t=(e=t.stateNode).getSnapshotBeforeUpdate(t.elementType===t.type?n:Yr(t.type,n),a),e.__reactInternalSnapshotBeforeUpdate=t}return;case 3:return void(256&t.flags&&qa(t.stateNode.containerInfo))}throw Error(i(163))}function _s(e,t,n){switch(n.tag){case 0:case 11:case 15:case 22:if(null!==(t=null!==(t=n.updateQueue)?t.lastEffect:null)){e=t=t.next;do{if(3==(3&e.tag)){var a=e.create;e.destroy=a()}e=e.next}while(e!==t)}if(null!==(t=null!==(t=n.updateQueue)?t.lastEffect:null)){e=t=t.next;do{var r=e;a=r.next,0!=(4&(r=r.tag))&&0!=(1&r)&&(Dl(n,e),Bl(n,e)),e=a}while(e!==t)}return;case 1:return 
e=n.stateNode,4&n.flags&&(null===t?e.componentDidMount():(a=n.elementType===n.type?t.memoizedProps:Yr(n.type,t.memoizedProps),e.componentDidUpdate(a,t.memoizedState,e.__reactInternalSnapshotBeforeUpdate))),void(null!==(t=n.updateQueue)&&mo(n,t,e));case 3:if(null!==(t=n.updateQueue)){if(e=null,null!==n.child)switch(n.child.tag){case 5:case 1:e=n.child.stateNode}mo(n,t,e)}return;case 5:return e=n.stateNode,void(null===t&&4&n.flags&&za(n.type,n.memoizedProps)&&e.focus());case 6:case 4:case 12:case 19:case 17:case 20:case 21:case 23:case 24:return;case 13:return void(null===n.memoizedState&&(n=n.alternate,null!==n&&(n=n.memoizedState,null!==n&&(n=n.dehydrated,null!==n&&yt(n)))))}throw Error(i(163))}function bs(e,t){for(var n=e;;){if(5===n.tag){var a=n.stateNode;if(t)"function"==typeof(a=a.style).setProperty?a.setProperty("display","none","important"):a.display="none";else{a=n.stateNode;var r=n.memoizedProps.style;r=null!=r&&r.hasOwnProperty("display")?r.display:null,a.style.display=ve("display",r)}}else if(6===n.tag)n.stateNode.nodeValue=t?"":n.memoizedProps;else if((23!==n.tag&&24!==n.tag||null===n.memoizedState||n===e)&&null!==n.child){n.child.return=n,n=n.child;continue}if(n===e)break;for(;null===n.sibling;){if(null===n.return||n.return===e)return;n=n.return}n.sibling.return=n.return,n=n.sibling}}function ks(e,t){if(Sr&&"function"==typeof Sr.onCommitFiberUnmount)try{Sr.onCommitFiberUnmount(wr,t)}catch(o){}switch(t.tag){case 0:case 11:case 14:case 15:case 22:if(null!==(e=t.updateQueue)&&null!==(e=e.lastEffect)){var n=e=e.next;do{var a=n,r=a.destroy;if(a=a.tag,void 0!==r)if(0!=(4&a))Dl(t,n);else{a=t;try{r()}catch(o){Ul(a,o)}}n=n.next}while(n!==e)}break;case 1:if(hs(t),"function"==typeof(e=t.stateNode).componentWillUnmount)try{e.props=t.memoizedProps,e.state=t.memoizedState,e.componentWillUnmount()}catch(o){Ul(t,o)}break;case 5:hs(t);break;case 4:xs(e,t)}}function 
vs(e){e.alternate=null,e.child=null,e.dependencies=null,e.firstEffect=null,e.lastEffect=null,e.memoizedProps=null,e.memoizedState=null,e.pendingProps=null,e.return=null,e.updateQueue=null}function ys(e){return 5===e.tag||3===e.tag||4===e.tag}function ws(e){e:{for(var t=e.return;null!==t;){if(ys(t))break e;t=t.return}throw Error(i(160))}var n=t;switch(t=n.stateNode,n.tag){case 5:var a=!1;break;case 3:case 4:t=t.containerInfo,a=!0;break;default:throw Error(i(161))}16&n.flags&&(_e(t,""),n.flags&=-17);e:t:for(n=e;;){for(;null===n.sibling;){if(null===n.return||ys(n.return)){n=null;break e}n=n.return}for(n.sibling.return=n.return,n=n.sibling;5!==n.tag&&6!==n.tag&&18!==n.tag;){if(2&n.flags)continue t;if(null===n.child||4===n.tag)continue t;n.child.return=n,n=n.child}if(!(2&n.flags)){n=n.stateNode;break e}}a?Ss(e,n,t):Es(e,n,t)}function Ss(e,t,n){var a=e.tag,r=5===a||6===a;if(r)e=r?e.stateNode:e.stateNode.instance,t?8===n.nodeType?n.parentNode.insertBefore(e,t):n.insertBefore(e,t):(8===n.nodeType?(t=n.parentNode).insertBefore(e,n):(t=n).appendChild(e),null!=(n=n._reactRootContainer)||null!==t.onclick||(t.onclick=Fa));else if(4!==a&&null!==(e=e.child))for(Ss(e,t,n),e=e.sibling;null!==e;)Ss(e,t,n),e=e.sibling}function Es(e,t,n){var a=e.tag,r=5===a||6===a;if(r)e=r?e.stateNode:e.stateNode.instance,t?n.insertBefore(e,t):n.appendChild(e);else if(4!==a&&null!==(e=e.child))for(Es(e,t,n),e=e.sibling;null!==e;)Es(e,t,n),e=e.sibling}function xs(e,t){for(var n,a,r=t,o=!1;;){if(!o){o=r.return;e:for(;;){if(null===o)throw Error(i(160));switch(n=o.stateNode,o.tag){case 5:a=!1;break e;case 3:case 4:n=n.containerInfo,a=!0;break e}o=o.return}o=!0}if(5===r.tag||6===r.tag){e:for(var s=e,l=r,c=l;;)if(ks(s,c),null!==c.child&&4!==c.tag)c.child.return=c,c=c.child;else{if(c===l)break e;for(;null===c.sibling;){if(null===c.return||c.return===l)break 
e;c=c.return}c.sibling.return=c.return,c=c.sibling}a?(s=n,l=r.stateNode,8===s.nodeType?s.parentNode.removeChild(l):s.removeChild(l)):n.removeChild(r.stateNode)}else if(4===r.tag){if(null!==r.child){n=r.stateNode.containerInfo,a=!0,r.child.return=r,r=r.child;continue}}else if(ks(e,r),null!==r.child){r.child.return=r,r=r.child;continue}if(r===t)break;for(;null===r.sibling;){if(null===r.return||r.return===t)return;4===(r=r.return).tag&&(o=!1)}r.sibling.return=r.return,r=r.sibling}}function Cs(e,t){switch(t.tag){case 0:case 11:case 14:case 15:case 22:var n=t.updateQueue;if(null!==(n=null!==n?n.lastEffect:null)){var a=n=n.next;do{3==(3&a.tag)&&(e=a.destroy,a.destroy=void 0,void 0!==e&&e()),a=a.next}while(a!==n)}return;case 1:case 12:case 17:return;case 5:if(null!=(n=t.stateNode)){a=t.memoizedProps;var r=null!==e?e.memoizedProps:a;e=t.type;var o=t.updateQueue;if(t.updateQueue=null,null!==o){for(n[Xa]=a,"input"===e&&"radio"===a.type&&null!=a.name&&te(n,a),Ee(e,r),t=Ee(e,a),r=0;r<o.length;r+=2){var s=o[r],l=o[r+1];"style"===s?ye(n,l):"dangerouslySetInnerHTML"===s?ge(n,l):"children"===s?_e(n,l):v(n,s,l,t)}switch(e){case"input":ne(n,a);break;case"textarea":ce(n,a);break;case"select":e=n._wrapperState.wasMultiple,n._wrapperState.wasMultiple=!!a.multiple,null!=(o=a.value)?ie(n,!!a.multiple,o,!1):e!==!!a.multiple&&(null!=a.defaultValue?ie(n,!!a.multiple,a.defaultValue,!0):ie(n,!!a.multiple,a.multiple?[]:"",!1))}}}return;case 6:if(null===t.stateNode)throw Error(i(162));return void(t.stateNode.nodeValue=t.memoizedProps);case 3:return void((n=t.stateNode).hydrate&&(n.hydrate=!1,yt(n.containerInfo)));case 13:return null!==t.memoizedState&&(Hs=zr(),bs(t.child,!0)),void Ts(t);case 19:return void Ts(t);case 23:case 24:return void bs(t,null!==t.memoizedState)}throw Error(i(163))}function Ts(e){var t=e.updateQueue;if(null!==t){e.updateQueue=null;var n=e.stateNode;null===n&&(n=e.stateNode=new ms),t.forEach((function(t){var a=zl.bind(null,e,t);n.has(t)||(n.add(t),t.then(a,a))}))}}function 
As(e,t){return null!==e&&(null===(e=e.memoizedState)||null!==e.dehydrated)&&(null!==(t=t.memoizedState)&&null===t.dehydrated)}var Gs=Math.ceil,Ls=y.ReactCurrentDispatcher,Ns=y.ReactCurrentOwner,Rs=0,Ps=null,Is=null,Os=0,Bs=0,Ds=lr(0),Ms=0,Fs=null,Us=0,js=0,zs=0,Ks=0,$s=null,Hs=0,qs=1/0;function Zs(){qs=zr()+500}var Ws,Vs=null,Ys=!1,Qs=null,Xs=null,Js=!1,el=null,tl=90,nl=[],al=[],rl=null,ol=0,il=null,sl=-1,ll=0,cl=0,ul=null,dl=!1;function fl(){return 0!=(48&Rs)?zr():-1!==sl?sl:sl=zr()}function pl(e){if(0==(2&(e=e.mode)))return 1;if(0==(4&e))return 99===Kr()?1:2;if(0===ll&&(ll=Us),0!==Vr.transition){0!==cl&&(cl=null!==$s?$s.pendingLanes:0),e=ll;var t=4186112&~cl;return 0===(t&=-t)&&(0===(t=(e=4186112&~e)&-e)&&(t=8192)),t}return e=Kr(),0!=(4&Rs)&&98===e?e=Ft(12,ll):e=Ft(e=function(e){switch(e){case 99:return 15;case 98:return 10;case 97:case 96:return 8;case 95:return 2;default:return 0}}(e),ll),e}function ml(e,t,n){if(50<ol)throw ol=0,il=null,Error(i(185));if(null===(e=hl(e,t)))return null;zt(e,t,n),e===Ps&&(zs|=t,4===Ms&&bl(e,Os));var a=Kr();1===t?0!=(8&Rs)&&0==(48&Rs)?kl(e):(gl(e,n),0===Rs&&(Zs(),Zr())):(0==(4&Rs)||98!==a&&99!==a||(null===rl?rl=new Set([e]):rl.add(e)),gl(e,n)),$s=e}function hl(e,t){e.lanes|=t;var n=e.alternate;for(null!==n&&(n.lanes|=t),n=e,e=e.return;null!==e;)e.childLanes|=t,null!==(n=e.alternate)&&(n.childLanes|=t),n=e,e=e.return;return 3===n.tag?n.stateNode:null}function gl(e,t){for(var n=e.callbackNode,a=e.suspendedLanes,r=e.pingedLanes,o=e.expirationTimes,s=e.pendingLanes;0<s;){var l=31-Kt(s),c=1<<l,u=o[l];if(-1===u){if(0==(c&a)||0!=(c&r)){u=t,Bt(c);var d=Ot;o[l]=10<=d?u+250:6<=d?u+5e3:-1}}else u<=t&&(e.expiredLanes|=c);s&=~c}if(a=Dt(e,e===Ps?Os:0),t=Ot,0===a)null!==n&&(n!==Br&&Cr(n),e.callbackNode=null,e.callbackPriority=0);else{if(null!==n){if(e.callbackPriority===t)return;n!==Br&&Cr(n)}15===t?(n=kl.bind(null,e),null===Mr?(Mr=[n],Fr=xr(Nr,Wr)):Mr.push(n),n=Br):14===t?n=qr(99,kl.bind(null,e)):(n=function(e){switch(e){case 15:case 14:return 
99;case 13:case 12:case 11:case 10:return 98;case 9:case 8:case 7:case 6:case 4:case 5:return 97;case 3:case 2:case 1:return 95;case 0:return 90;default:throw Error(i(358,e))}}(t),n=qr(n,_l.bind(null,e))),e.callbackPriority=t,e.callbackNode=n}}function _l(e){if(sl=-1,cl=ll=0,0!=(48&Rs))throw Error(i(327));var t=e.callbackNode;if(Ol()&&e.callbackNode!==t)return null;var n=Dt(e,e===Ps?Os:0);if(0===n)return null;var a=n,r=Rs;Rs|=16;var o=Cl();for(Ps===e&&Os===a||(Zs(),El(e,a));;)try{Gl();break}catch(l){xl(e,l)}if(to(),Ls.current=o,Rs=r,null!==Is?a=0:(Ps=null,Os=0,a=Ms),0!=(Us&zs))El(e,0);else if(0!==a){if(2===a&&(Rs|=64,e.hydrate&&(e.hydrate=!1,qa(e.containerInfo)),0!==(n=Mt(e))&&(a=Tl(e,n))),1===a)throw t=Fs,El(e,0),bl(e,n),gl(e,zr()),t;switch(e.finishedWork=e.current.alternate,e.finishedLanes=n,a){case 0:case 1:throw Error(i(345));case 2:case 5:Rl(e);break;case 3:if(bl(e,n),(62914560&n)===n&&10<(a=Hs+500-zr())){if(0!==Dt(e,0))break;if(((r=e.suspendedLanes)&n)!==n){fl(),e.pingedLanes|=e.suspendedLanes&r;break}e.timeoutHandle=$a(Rl.bind(null,e),a);break}Rl(e);break;case 4:if(bl(e,n),(4186112&n)===n)break;for(a=e.eventTimes,r=-1;0<n;){var s=31-Kt(n);o=1<<s,(s=a[s])>r&&(r=s),n&=~o}if(n=r,10<(n=(120>(n=zr()-n)?120:480>n?480:1080>n?1080:1920>n?1920:3e3>n?3e3:4320>n?4320:1960*Gs(n/1960))-n)){e.timeoutHandle=$a(Rl.bind(null,e),n);break}Rl(e);break;default:throw Error(i(329))}}return gl(e,zr()),e.callbackNode===t?_l.bind(null,e):null}function bl(e,t){for(t&=~Ks,t&=~zs,e.suspendedLanes|=t,e.pingedLanes&=~t,e=e.expirationTimes;0<t;){var n=31-Kt(t),a=1<<n;e[n]=-1,t&=~a}}function kl(e){if(0!=(48&Rs))throw Error(i(327));if(Ol(),e===Ps&&0!=(e.expiredLanes&Os)){var t=Os,n=Tl(e,t);0!=(Us&zs)&&(n=Tl(e,t=Dt(e,t)))}else n=Tl(e,t=Dt(e,0));if(0!==e.tag&&2===n&&(Rs|=64,e.hydrate&&(e.hydrate=!1,qa(e.containerInfo)),0!==(t=Mt(e))&&(n=Tl(e,t))),1===n)throw n=Fs,El(e,0),bl(e,t),gl(e,zr()),n;return e.finishedWork=e.current.alternate,e.finishedLanes=t,Rl(e),gl(e,zr()),null}function vl(e,t){var 
n=Rs;Rs|=1;try{return e(t)}finally{0===(Rs=n)&&(Zs(),Zr())}}function yl(e,t){var n=Rs;Rs&=-2,Rs|=8;try{return e(t)}finally{0===(Rs=n)&&(Zs(),Zr())}}function wl(e,t){ur(Ds,Bs),Bs|=t,Us|=t}function Sl(){Bs=Ds.current,cr(Ds)}function El(e,t){e.finishedWork=null,e.finishedLanes=0;var n=e.timeoutHandle;if(-1!==n&&(e.timeoutHandle=-1,Ha(n)),null!==Is)for(n=Is.return;null!==n;){var a=n;switch(a.tag){case 1:null!=(a=a.type.childContextTypes)&&_r();break;case 3:Io(),cr(pr),cr(fr),Vo();break;case 5:Bo(a);break;case 4:Io();break;case 13:case 19:cr(Do);break;case 10:no(a);break;case 23:case 24:Sl()}n=n.return}Ps=e,Is=ql(e.current,null),Os=Bs=Us=t,Ms=0,Fs=null,Ks=zs=js=0}function xl(e,t){for(;;){var n=Is;try{if(to(),Yo.current=Ni,ni){for(var a=Jo.memoizedState;null!==a;){var r=a.queue;null!==r&&(r.pending=null),a=a.next}ni=!1}if(Xo=0,ti=ei=Jo=null,ai=!1,Ns.current=null,null===n||null===n.return){Ms=1,Fs=t,Is=null;break}e:{var o=e,i=n.return,s=n,l=t;if(t=Os,s.flags|=2048,s.firstEffect=s.lastEffect=null,null!==l&&"object"==typeof l&&"function"==typeof l.then){var c=l;if(0==(2&s.mode)){var u=s.alternate;u?(s.updateQueue=u.updateQueue,s.memoizedState=u.memoizedState,s.lanes=u.lanes):(s.updateQueue=null,s.memoizedState=null)}var d=0!=(1&Do.current),f=i;do{var p;if(p=13===f.tag){var m=f.memoizedState;if(null!==m)p=null!==m.dehydrated;else{var h=f.memoizedProps;p=void 0!==h.fallback&&(!0!==h.unstable_avoidThisFallback||!d)}}if(p){var g=f.updateQueue;if(null===g){var _=new Set;_.add(c),f.updateQueue=_}else g.add(c);if(0==(2&f.mode)){if(f.flags|=64,s.flags|=16384,s.flags&=-2981,1===s.tag)if(null===s.alternate)s.tag=17;else{var b=co(-1,1);b.tag=2,uo(s,b)}s.lanes|=1;break e}l=void 0,s=t;var k=o.pingCache;if(null===k?(k=o.pingCache=new ds,l=new Set,k.set(c,l)):void 0===(l=k.get(c))&&(l=new Set,k.set(c,l)),!l.has(s)){l.add(s);var v=jl.bind(null,o,c,s);c.then(v,v)}f.flags|=4096,f.lanes=t;break e}f=f.return}while(null!==f);l=Error((Z(s.type)||"A React component")+" suspended while rendering, 
but no fallback UI was specified.\n\nAdd a <Suspense fallback=...> component higher in the tree to provide a loading indicator or placeholder to display.")}5!==Ms&&(Ms=2),l=cs(l,s),f=i;do{switch(f.tag){case 3:o=l,f.flags|=4096,t&=-t,f.lanes|=t,fo(f,fs(0,o,t));break e;case 1:o=l;var y=f.type,w=f.stateNode;if(0==(64&f.flags)&&("function"==typeof y.getDerivedStateFromError||null!==w&&"function"==typeof w.componentDidCatch&&(null===Xs||!Xs.has(w)))){f.flags|=4096,t&=-t,f.lanes|=t,fo(f,ps(f,o,t));break e}}f=f.return}while(null!==f)}Nl(n)}catch(S){t=S,Is===n&&null!==n&&(Is=n=n.return);continue}break}}function Cl(){var e=Ls.current;return Ls.current=Ni,null===e?Ni:e}function Tl(e,t){var n=Rs;Rs|=16;var a=Cl();for(Ps===e&&Os===t||El(e,t);;)try{Al();break}catch(r){xl(e,r)}if(to(),Rs=n,Ls.current=a,null!==Is)throw Error(i(261));return Ps=null,Os=0,Ms}function Al(){for(;null!==Is;)Ll(Is)}function Gl(){for(;null!==Is&&!Tr();)Ll(Is)}function Ll(e){var t=Ws(e.alternate,e,Bs);e.memoizedProps=e.pendingProps,null===t?Nl(e):Is=t,Ns.current=null}function Nl(e){var t=e;do{var n=t.alternate;if(e=t.return,0==(2048&t.flags)){if(null!==(n=ss(n,t,Bs)))return void(Is=n);if(24!==(n=t).tag&&23!==n.tag||null===n.memoizedState||0!=(1073741824&Bs)||0==(4&n.mode)){for(var a=0,r=n.child;null!==r;)a|=r.lanes|r.childLanes,r=r.sibling;n.childLanes=a}null!==e&&0==(2048&e.flags)&&(null===e.firstEffect&&(e.firstEffect=t.firstEffect),null!==t.lastEffect&&(null!==e.lastEffect&&(e.lastEffect.nextEffect=t.firstEffect),e.lastEffect=t.lastEffect),1<t.flags&&(null!==e.lastEffect?e.lastEffect.nextEffect=t:e.firstEffect=t,e.lastEffect=t))}else{if(null!==(n=ls(t)))return n.flags&=2047,void(Is=n);null!==e&&(e.firstEffect=e.lastEffect=null,e.flags|=2048)}if(null!==(t=t.sibling))return void(Is=t);Is=t=e}while(null!==t);0===Ms&&(Ms=5)}function Rl(e){var t=Kr();return Hr(99,Pl.bind(null,e,t)),null}function Pl(e,t){do{Ol()}while(null!==el);if(0!=(48&Rs))throw Error(i(327));var n=e.finishedWork;if(null===n)return 
null;if(e.finishedWork=null,e.finishedLanes=0,n===e.current)throw Error(i(177));e.callbackNode=null;var a=n.lanes|n.childLanes,r=a,o=e.pendingLanes&~r;e.pendingLanes=r,e.suspendedLanes=0,e.pingedLanes=0,e.expiredLanes&=r,e.mutableReadLanes&=r,e.entangledLanes&=r,r=e.entanglements;for(var s=e.eventTimes,l=e.expirationTimes;0<o;){var c=31-Kt(o),u=1<<c;r[c]=0,s[c]=-1,l[c]=-1,o&=~u}if(null!==rl&&0==(24&a)&&rl.has(e)&&rl.delete(e),e===Ps&&(Is=Ps=null,Os=0),1<n.flags?null!==n.lastEffect?(n.lastEffect.nextEffect=n,a=n.firstEffect):a=n:a=n.firstEffect,null!==a){if(r=Rs,Rs|=32,Ns.current=null,Ua=Wt,ga(s=ha())){if("selectionStart"in s)l={start:s.selectionStart,end:s.selectionEnd};else e:if(l=(l=s.ownerDocument)&&l.defaultView||window,(u=l.getSelection&&l.getSelection())&&0!==u.rangeCount){l=u.anchorNode,o=u.anchorOffset,c=u.focusNode,u=u.focusOffset;try{l.nodeType,c.nodeType}catch(C){l=null;break e}var d=0,f=-1,p=-1,m=0,h=0,g=s,_=null;t:for(;;){for(var b;g!==l||0!==o&&3!==g.nodeType||(f=d+o),g!==c||0!==u&&3!==g.nodeType||(p=d+u),3===g.nodeType&&(d+=g.nodeValue.length),null!==(b=g.firstChild);)_=g,g=b;for(;;){if(g===s)break t;if(_===l&&++m===o&&(f=d),_===c&&++h===u&&(p=d),null!==(b=g.nextSibling))break;_=(g=_).parentNode}g=b}l=-1===f||-1===p?null:{start:f,end:p}}else l=null;l=l||{start:0,end:0}}else l=null;ja={focusedElem:s,selectionRange:l},Wt=!1,ul=null,dl=!1,Vs=a;do{try{Il()}catch(C){if(null===Vs)throw Error(i(330));Ul(Vs,C),Vs=Vs.nextEffect}}while(null!==Vs);ul=null,Vs=a;do{try{for(s=e;null!==Vs;){var k=Vs.flags;if(16&k&&_e(Vs.stateNode,""),128&k){var v=Vs.alternate;if(null!==v){var y=v.ref;null!==y&&("function"==typeof y?y(null):y.current=null)}}switch(1038&k){case 2:ws(Vs),Vs.flags&=-3;break;case 6:ws(Vs),Vs.flags&=-3,Cs(Vs.alternate,Vs);break;case 1024:Vs.flags&=-1025;break;case 1028:Vs.flags&=-1025,Cs(Vs.alternate,Vs);break;case 4:Cs(Vs.alternate,Vs);break;case 8:xs(s,l=Vs);var w=l.alternate;vs(l),null!==w&&vs(w)}Vs=Vs.nextEffect}}catch(C){if(null===Vs)throw 
Error(i(330));Ul(Vs,C),Vs=Vs.nextEffect}}while(null!==Vs);if(y=ja,v=ha(),k=y.focusedElem,s=y.selectionRange,v!==k&&k&&k.ownerDocument&&ma(k.ownerDocument.documentElement,k)){null!==s&&ga(k)&&(v=s.start,void 0===(y=s.end)&&(y=v),"selectionStart"in k?(k.selectionStart=v,k.selectionEnd=Math.min(y,k.value.length)):(y=(v=k.ownerDocument||document)&&v.defaultView||window).getSelection&&(y=y.getSelection(),l=k.textContent.length,w=Math.min(s.start,l),s=void 0===s.end?w:Math.min(s.end,l),!y.extend&&w>s&&(l=s,s=w,w=l),l=pa(k,w),o=pa(k,s),l&&o&&(1!==y.rangeCount||y.anchorNode!==l.node||y.anchorOffset!==l.offset||y.focusNode!==o.node||y.focusOffset!==o.offset)&&((v=v.createRange()).setStart(l.node,l.offset),y.removeAllRanges(),w>s?(y.addRange(v),y.extend(o.node,o.offset)):(v.setEnd(o.node,o.offset),y.addRange(v))))),v=[];for(y=k;y=y.parentNode;)1===y.nodeType&&v.push({element:y,left:y.scrollLeft,top:y.scrollTop});for("function"==typeof k.focus&&k.focus(),k=0;k<v.length;k++)(y=v[k]).element.scrollLeft=y.left,y.element.scrollTop=y.top}Wt=!!Ua,ja=Ua=null,e.current=n,Vs=a;do{try{for(k=e;null!==Vs;){var S=Vs.flags;if(36&S&&_s(k,Vs.alternate,Vs),128&S){v=void 0;var E=Vs.ref;if(null!==E){var x=Vs.stateNode;Vs.tag,v=x,"function"==typeof E?E(v):E.current=v}}Vs=Vs.nextEffect}}catch(C){if(null===Vs)throw Error(i(330));Ul(Vs,C),Vs=Vs.nextEffect}}while(null!==Vs);Vs=null,Dr(),Rs=r}else e.current=n;if(Js)Js=!1,el=e,tl=t;else for(Vs=a;null!==Vs;)t=Vs.nextEffect,Vs.nextEffect=null,8&Vs.flags&&((S=Vs).sibling=null,S.stateNode=null),Vs=t;if(0===(a=e.pendingLanes)&&(Xs=null),1===a?e===il?ol++:(ol=0,il=e):ol=0,n=n.stateNode,Sr&&"function"==typeof Sr.onCommitFiberRoot)try{Sr.onCommitFiberRoot(wr,n,void 0,64==(64&n.current.flags))}catch(C){}if(gl(e,zr()),Ys)throw Ys=!1,e=Qs,Qs=null,e;return 0!=(8&Rs)||Zr(),null}function Il(){for(;null!==Vs;){var e=Vs.alternate;dl||null===ul||(0!=(8&Vs.flags)?Je(Vs,ul)&&(dl=!0):13===Vs.tag&&As(e,Vs)&&Je(Vs,ul)&&(dl=!0));var 
t=Vs.flags;0!=(256&t)&&gs(e,Vs),0==(512&t)||Js||(Js=!0,qr(97,(function(){return Ol(),null}))),Vs=Vs.nextEffect}}function Ol(){if(90!==tl){var e=97<tl?97:tl;return tl=90,Hr(e,Ml)}return!1}function Bl(e,t){nl.push(t,e),Js||(Js=!0,qr(97,(function(){return Ol(),null})))}function Dl(e,t){al.push(t,e),Js||(Js=!0,qr(97,(function(){return Ol(),null})))}function Ml(){if(null===el)return!1;var e=el;if(el=null,0!=(48&Rs))throw Error(i(331));var t=Rs;Rs|=32;var n=al;al=[];for(var a=0;a<n.length;a+=2){var r=n[a],o=n[a+1],s=r.destroy;if(r.destroy=void 0,"function"==typeof s)try{s()}catch(c){if(null===o)throw Error(i(330));Ul(o,c)}}for(n=nl,nl=[],a=0;a<n.length;a+=2){r=n[a],o=n[a+1];try{var l=r.create;r.destroy=l()}catch(c){if(null===o)throw Error(i(330));Ul(o,c)}}for(l=e.current.firstEffect;null!==l;)e=l.nextEffect,l.nextEffect=null,8&l.flags&&(l.sibling=null,l.stateNode=null),l=e;return Rs=t,Zr(),!0}function Fl(e,t,n){uo(e,t=fs(0,t=cs(n,t),1)),t=fl(),null!==(e=hl(e,1))&&(zt(e,1,t),gl(e,t))}function Ul(e,t){if(3===e.tag)Fl(e,e,t);else for(var n=e.return;null!==n;){if(3===n.tag){Fl(n,e,t);break}if(1===n.tag){var a=n.stateNode;if("function"==typeof n.type.getDerivedStateFromError||"function"==typeof a.componentDidCatch&&(null===Xs||!Xs.has(a))){var r=ps(n,e=cs(t,e),1);if(uo(n,r),r=fl(),null!==(n=hl(n,1)))zt(n,1,r),gl(n,r);else if("function"==typeof a.componentDidCatch&&(null===Xs||!Xs.has(a)))try{a.componentDidCatch(t,e)}catch(o){}break}}n=n.return}}function jl(e,t,n){var a=e.pingCache;null!==a&&a.delete(t),t=fl(),e.pingedLanes|=e.suspendedLanes&n,Ps===e&&(Os&n)===n&&(4===Ms||3===Ms&&(62914560&Os)===Os&&500>zr()-Hs?El(e,0):Ks|=n),gl(e,t)}function zl(e,t){var n=e.stateNode;null!==n&&n.delete(t),0===(t=0)&&(0==(2&(t=e.mode))?t=1:0==(4&t)?t=99===Kr()?1:2:(0===ll&&(ll=Us),0===(t=Ut(62914560&~ll))&&(t=4194304))),n=fl(),null!==(e=hl(e,t))&&(zt(e,t,n),gl(e,n))}function 
Kl(e,t,n,a){this.tag=e,this.key=n,this.sibling=this.child=this.return=this.stateNode=this.type=this.elementType=null,this.index=0,this.ref=null,this.pendingProps=t,this.dependencies=this.memoizedState=this.updateQueue=this.memoizedProps=null,this.mode=a,this.flags=0,this.lastEffect=this.firstEffect=this.nextEffect=null,this.childLanes=this.lanes=0,this.alternate=null}function $l(e,t,n,a){return new Kl(e,t,n,a)}function Hl(e){return!(!(e=e.prototype)||!e.isReactComponent)}function ql(e,t){var n=e.alternate;return null===n?((n=$l(e.tag,t,e.key,e.mode)).elementType=e.elementType,n.type=e.type,n.stateNode=e.stateNode,n.alternate=e,e.alternate=n):(n.pendingProps=t,n.type=e.type,n.flags=0,n.nextEffect=null,n.firstEffect=null,n.lastEffect=null),n.childLanes=e.childLanes,n.lanes=e.lanes,n.child=e.child,n.memoizedProps=e.memoizedProps,n.memoizedState=e.memoizedState,n.updateQueue=e.updateQueue,t=e.dependencies,n.dependencies=null===t?null:{lanes:t.lanes,firstContext:t.firstContext},n.sibling=e.sibling,n.index=e.index,n.ref=e.ref,n}function Zl(e,t,n,a,r,o){var s=2;if(a=e,"function"==typeof e)Hl(e)&&(s=1);else if("string"==typeof e)s=5;else e:switch(e){case E:return Wl(n.children,r,o,t);case B:s=8,r|=16;break;case x:s=8,r|=1;break;case C:return(e=$l(12,n,t,8|r)).elementType=C,e.type=C,e.lanes=o,e;case L:return(e=$l(13,n,t,r)).type=L,e.elementType=L,e.lanes=o,e;case N:return(e=$l(19,n,t,r)).elementType=N,e.lanes=o,e;case D:return Vl(n,r,o,t);case M:return(e=$l(24,n,t,r)).elementType=M,e.lanes=o,e;default:if("object"==typeof e&&null!==e)switch(e.$$typeof){case T:s=10;break e;case A:s=9;break e;case G:s=11;break e;case R:s=14;break e;case P:s=16,a=null;break e;case I:s=22;break e}throw Error(i(130,null==e?e:typeof e,""))}return(t=$l(s,n,t,r)).elementType=e,t.type=a,t.lanes=o,t}function Wl(e,t,n,a){return(e=$l(7,e,a,t)).lanes=n,e}function Vl(e,t,n,a){return(e=$l(23,e,a,t)).elementType=D,e.lanes=n,e}function Yl(e,t,n){return(e=$l(6,e,null,t)).lanes=n,e}function 
Ql(e,t,n){return(t=$l(4,null!==e.children?e.children:[],e.key,t)).lanes=n,t.stateNode={containerInfo:e.containerInfo,pendingChildren:null,implementation:e.implementation},t}function Xl(e,t,n){this.tag=t,this.containerInfo=e,this.finishedWork=this.pingCache=this.current=this.pendingChildren=null,this.timeoutHandle=-1,this.pendingContext=this.context=null,this.hydrate=n,this.callbackNode=null,this.callbackPriority=0,this.eventTimes=jt(0),this.expirationTimes=jt(-1),this.entangledLanes=this.finishedLanes=this.mutableReadLanes=this.expiredLanes=this.pingedLanes=this.suspendedLanes=this.pendingLanes=0,this.entanglements=jt(0),this.mutableSourceEagerHydrationData=null}function Jl(e,t,n,a){var r=t.current,o=fl(),s=pl(r);e:if(n){t:{if(Ve(n=n._reactInternals)!==n||1!==n.tag)throw Error(i(170));var l=n;do{switch(l.tag){case 3:l=l.stateNode.context;break t;case 1:if(gr(l.type)){l=l.stateNode.__reactInternalMemoizedMergedChildContext;break t}}l=l.return}while(null!==l);throw Error(i(171))}if(1===n.tag){var c=n.type;if(gr(c)){n=kr(n,c,l);break e}}n=l}else n=dr;return null===t.context?t.context=n:t.pendingContext=n,(t=co(o,s)).payload={element:e},null!==(a=void 0===a?null:a)&&(t.callback=a),uo(r,t),ml(r,s,o),s}function ec(e){return(e=e.current).child?(e.child.tag,e.child.stateNode):null}function tc(e,t){if(null!==(e=e.memoizedState)&&null!==e.dehydrated){var n=e.retryLane;e.retryLane=0!==n&&n<t?n:t}}function nc(e,t){tc(e,t),(e=e.alternate)&&tc(e,t)}function ac(e,t,n){var a=null!=n&&null!=n.hydrationOptions&&n.hydrationOptions.mutableSources||null;if(n=new Xl(e,t,null!=n&&!0===n.hydrate),t=$l(3,null,null,2===t?7:1===t?3:0),n.current=t,t.stateNode=n,so(t),e[Ja]=n.current,Na(8===e.nodeType?e.parentNode:e),a)for(e=0;e<a.length;e++){var r=(t=a[e])._getVersion;r=r(t._source),null==n.mutableSourceEagerHydrationData?n.mutableSourceEagerHydrationData=[t,r]:n.mutableSourceEagerHydrationData.push(t,r)}this._internalRoot=n}function 
rc(e){return!(!e||1!==e.nodeType&&9!==e.nodeType&&11!==e.nodeType&&(8!==e.nodeType||" react-mount-point-unstable "!==e.nodeValue))}function oc(e,t,n,a,r){var o=n._reactRootContainer;if(o){var i=o._internalRoot;if("function"==typeof r){var s=r;r=function(){var e=ec(i);s.call(e)}}Jl(t,i,e,r)}else{if(o=n._reactRootContainer=function(e,t){if(t||(t=!(!(t=e?9===e.nodeType?e.documentElement:e.firstChild:null)||1!==t.nodeType||!t.hasAttribute("data-reactroot"))),!t)for(var n;n=e.lastChild;)e.removeChild(n);return new ac(e,0,t?{hydrate:!0}:void 0)}(n,a),i=o._internalRoot,"function"==typeof r){var l=r;r=function(){var e=ec(i);l.call(e)}}yl((function(){Jl(t,i,e,r)}))}return ec(i)}function ic(e,t){var n=2<arguments.length&&void 0!==arguments[2]?arguments[2]:null;if(!rc(t))throw Error(i(200));return function(e,t,n){var a=3<arguments.length&&void 0!==arguments[3]?arguments[3]:null;return{$$typeof:S,key:null==a?null:""+a,children:e,containerInfo:t,implementation:n}}(e,t,null,n)}Ws=function(e,t,n){var a=t.lanes;if(null!==e)if(e.memoizedProps!==t.pendingProps||pr.current)Bi=!0;else{if(0==(n&a)){switch(Bi=!1,t.tag){case 3:qi(t),Zo();break;case 5:Oo(t);break;case 1:gr(t.type)&&vr(t);break;case 4:Po(t,t.stateNode.containerInfo);break;case 10:a=t.memoizedProps.value;var r=t.type._context;ur(Qr,r._currentValue),r._currentValue=a;break;case 13:if(null!==t.memoizedState)return 0!=(n&t.child.childLanes)?Xi(e,t,n):(ur(Do,1&Do.current),null!==(t=os(e,t,n))?t.sibling:null);ur(Do,1&Do.current);break;case 19:if(a=0!=(n&t.childLanes),0!=(64&e.flags)){if(a)return rs(e,t,n);t.flags|=64}if(null!==(r=t.memoizedState)&&(r.rendering=null,r.tail=null,r.lastEffect=null),ur(Do,Do.current),a)break;return null;case 23:case 24:return t.lanes=0,ji(e,t,n)}return os(e,t,n)}Bi=0!=(16384&e.flags)}else Bi=!1;switch(t.lanes=0,t.tag){case 2:if(a=t.type,null!==e&&(e.alternate=null,t.alternate=null,t.flags|=2),e=t.pendingProps,r=hr(t,fr.current),ro(t,n),r=ii(null,t,a,e,r,n),t.flags|=1,"object"==typeof 
r&&null!==r&&"function"==typeof r.render&&void 0===r.$$typeof){if(t.tag=1,t.memoizedState=null,t.updateQueue=null,gr(a)){var o=!0;vr(t)}else o=!1;t.memoizedState=null!==r.state&&void 0!==r.state?r.state:null,so(t);var s=a.getDerivedStateFromProps;"function"==typeof s&&go(t,a,s,e),r.updater=_o,t.stateNode=r,r._reactInternals=t,yo(t,a,e,n),t=Hi(null,t,a,!0,o,n)}else t.tag=0,Di(null,t,r,n),t=t.child;return t;case 16:r=t.elementType;e:{switch(null!==e&&(e.alternate=null,t.alternate=null,t.flags|=2),e=t.pendingProps,r=(o=r._init)(r._payload),t.type=r,o=t.tag=function(e){if("function"==typeof e)return Hl(e)?1:0;if(null!=e){if((e=e.$$typeof)===G)return 11;if(e===R)return 14}return 2}(r),e=Yr(r,e),o){case 0:t=Ki(null,t,r,e,n);break e;case 1:t=$i(null,t,r,e,n);break e;case 11:t=Mi(null,t,r,e,n);break e;case 14:t=Fi(null,t,r,Yr(r.type,e),a,n);break e}throw Error(i(306,r,""))}return t;case 0:return a=t.type,r=t.pendingProps,Ki(e,t,a,r=t.elementType===a?r:Yr(a,r),n);case 1:return a=t.type,r=t.pendingProps,$i(e,t,a,r=t.elementType===a?r:Yr(a,r),n);case 3:if(qi(t),a=t.updateQueue,null===e||null===a)throw Error(i(282));if(a=t.pendingProps,r=null!==(r=t.memoizedState)?r.element:null,lo(e,t),po(t,a,null,n),(a=t.memoizedState.element)===r)Zo(),t=os(e,t,n);else{if((o=(r=t.stateNode).hydrate)&&(Uo=Za(t.stateNode.containerInfo.firstChild),Fo=t,o=jo=!0),o){if(null!=(e=r.mutableSourceEagerHydrationData))for(r=0;r<e.length;r+=2)(o=e[r])._workInProgressVersionPrimary=e[r+1],Wo.push(o);for(n=To(t,null,a,n),t.child=n;n;)n.flags=-3&n.flags|1024,n=n.sibling}else Di(e,t,a,n),Zo();t=t.child}return t;case 5:return Oo(t),null===e&&$o(t),a=t.type,r=t.pendingProps,o=null!==e?e.memoizedProps:null,s=r.children,Ka(a,r)?s=null:null!==o&&Ka(a,o)&&(t.flags|=16),zi(e,t),Di(e,t,s,n),t.child;case 6:return null===e&&$o(t),null;case 13:return Xi(e,t,n);case 4:return Po(t,t.stateNode.containerInfo),a=t.pendingProps,null===e?t.child=Co(t,null,a,n):Di(e,t,a,n),t.child;case 11:return 
a=t.type,r=t.pendingProps,Mi(e,t,a,r=t.elementType===a?r:Yr(a,r),n);case 7:return Di(e,t,t.pendingProps,n),t.child;case 8:case 12:return Di(e,t,t.pendingProps.children,n),t.child;case 10:e:{a=t.type._context,r=t.pendingProps,s=t.memoizedProps,o=r.value;var l=t.type._context;if(ur(Qr,l._currentValue),l._currentValue=o,null!==s)if(l=s.value,0===(o=ca(l,o)?0:0|("function"==typeof a._calculateChangedBits?a._calculateChangedBits(l,o):1073741823))){if(s.children===r.children&&!pr.current){t=os(e,t,n);break e}}else for(null!==(l=t.child)&&(l.return=t);null!==l;){var c=l.dependencies;if(null!==c){s=l.child;for(var u=c.firstContext;null!==u;){if(u.context===a&&0!=(u.observedBits&o)){1===l.tag&&((u=co(-1,n&-n)).tag=2,uo(l,u)),l.lanes|=n,null!==(u=l.alternate)&&(u.lanes|=n),ao(l.return,n),c.lanes|=n;break}u=u.next}}else s=10===l.tag&&l.type===t.type?null:l.child;if(null!==s)s.return=l;else for(s=l;null!==s;){if(s===t){s=null;break}if(null!==(l=s.sibling)){l.return=s.return,s=l;break}s=s.return}l=s}Di(e,t,r.children,n),t=t.child}return t;case 9:return r=t.type,a=(o=t.pendingProps).children,ro(t,n),a=a(r=oo(r,o.unstable_observedBits)),t.flags|=1,Di(e,t,a,n),t.child;case 14:return o=Yr(r=t.type,t.pendingProps),Fi(e,t,r,o=Yr(r.type,o),a,n);case 15:return Ui(e,t,t.type,t.pendingProps,a,n);case 17:return a=t.type,r=t.pendingProps,r=t.elementType===a?r:Yr(a,r),null!==e&&(e.alternate=null,t.alternate=null,t.flags|=2),t.tag=1,gr(a)?(e=!0,vr(t)):e=!1,ro(t,n),ko(t,a,r),yo(t,a,r,n),Hi(null,t,a,!0,e,n);case 19:return rs(e,t,n);case 23:case 24:return ji(e,t,n)}throw Error(i(156,t.tag))},ac.prototype.render=function(e){Jl(e,this._internalRoot,null,null)},ac.prototype.unmount=function(){var e=this._internalRoot,t=e.containerInfo;Jl(null,e,null,(function(){t[Ja]=null}))},et=function(e){13===e.tag&&(ml(e,4,fl()),nc(e,4))},tt=function(e){13===e.tag&&(ml(e,67108864,fl()),nc(e,67108864))},nt=function(e){if(13===e.tag){var t=fl(),n=pl(e);ml(e,n,t),nc(e,n)}},at=function(e,t){return 
t()},Ce=function(e,t,n){switch(t){case"input":if(ne(e,n),t=n.name,"radio"===n.type&&null!=t){for(n=e;n.parentNode;)n=n.parentNode;for(n=n.querySelectorAll("input[name="+JSON.stringify(""+t)+'][type="radio"]'),t=0;t<n.length;t++){var a=n[t];if(a!==e&&a.form===e.form){var r=rr(a);if(!r)throw Error(i(90));Q(a),ne(a,r)}}}break;case"textarea":ce(e,n);break;case"select":null!=(t=n.value)&&ie(e,!!n.multiple,t,!1)}},Re=vl,Pe=function(e,t,n,a,r){var o=Rs;Rs|=4;try{return Hr(98,e.bind(null,t,n,a,r))}finally{0===(Rs=o)&&(Zs(),Zr())}},Ie=function(){0==(49&Rs)&&(function(){if(null!==rl){var e=rl;rl=null,e.forEach((function(e){e.expiredLanes|=24&e.pendingLanes,gl(e,zr())}))}Zr()}(),Ol())},Oe=function(e,t){var n=Rs;Rs|=2;try{return e(t)}finally{0===(Rs=n)&&(Zs(),Zr())}};var sc={Events:[nr,ar,rr,Le,Ne,Ol,{current:!1}]},lc={findFiberByHostInstance:tr,bundleType:0,version:"17.0.2",rendererPackageName:"react-dom"},cc={bundleType:lc.bundleType,version:lc.version,rendererPackageName:lc.rendererPackageName,rendererConfig:lc.rendererConfig,overrideHookState:null,overrideHookStateDeletePath:null,overrideHookStateRenamePath:null,overrideProps:null,overridePropsDeletePath:null,overridePropsRenamePath:null,setSuspenseHandler:null,scheduleUpdate:null,currentDispatcherRef:y.ReactCurrentDispatcher,findHostInstanceByFiber:function(e){return null===(e=Xe(e))?null:e.stateNode},findFiberByHostInstance:lc.findFiberByHostInstance||function(){return null},findHostInstancesForRefresh:null,scheduleRefresh:null,scheduleRoot:null,setRefreshHandler:null,getCurrentFiber:null};if("undefined"!=typeof __REACT_DEVTOOLS_GLOBAL_HOOK__){var uc=__REACT_DEVTOOLS_GLOBAL_HOOK__;if(!uc.isDisabled&&uc.supportsFiber)try{wr=uc.inject(cc),Sr=uc}catch(he){}}t.createPortal=ic,t.hydrate=function(e,t,n){if(!rc(t))throw Error(i(200));return oc(null,e,t,!0,n)}},3935:(e,t,n)=>{"use strict";!function e(){if("undefined"!=typeof __REACT_DEVTOOLS_GLOBAL_HOOK__&&"function"==typeof 
__REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE)try{__REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE(e)}catch(t){console.error(t)}}(),e.exports=n(4448)},9590:e=>{var t="undefined"!=typeof Element,n="function"==typeof Map,a="function"==typeof Set,r="function"==typeof ArrayBuffer&&!!ArrayBuffer.isView;function o(e,i){if(e===i)return!0;if(e&&i&&"object"==typeof e&&"object"==typeof i){if(e.constructor!==i.constructor)return!1;var s,l,c,u;if(Array.isArray(e)){if((s=e.length)!=i.length)return!1;for(l=s;0!=l--;)if(!o(e[l],i[l]))return!1;return!0}if(n&&e instanceof Map&&i instanceof Map){if(e.size!==i.size)return!1;for(u=e.entries();!(l=u.next()).done;)if(!i.has(l.value[0]))return!1;for(u=e.entries();!(l=u.next()).done;)if(!o(l.value[1],i.get(l.value[0])))return!1;return!0}if(a&&e instanceof Set&&i instanceof Set){if(e.size!==i.size)return!1;for(u=e.entries();!(l=u.next()).done;)if(!i.has(l.value[0]))return!1;return!0}if(r&&ArrayBuffer.isView(e)&&ArrayBuffer.isView(i)){if((s=e.length)!=i.length)return!1;for(l=s;0!=l--;)if(e[l]!==i[l])return!1;return!0}if(e.constructor===RegExp)return e.source===i.source&&e.flags===i.flags;if(e.valueOf!==Object.prototype.valueOf&&"function"==typeof e.valueOf&&"function"==typeof i.valueOf)return e.valueOf()===i.valueOf();if(e.toString!==Object.prototype.toString&&"function"==typeof e.toString&&"function"==typeof i.toString)return e.toString()===i.toString();if((s=(c=Object.keys(e)).length)!==Object.keys(i).length)return!1;for(l=s;0!=l--;)if(!Object.prototype.hasOwnProperty.call(i,c[l]))return!1;if(t&&e instanceof Element)return!1;for(l=s;0!=l--;)if(("_owner"!==c[l]&&"__v"!==c[l]&&"__o"!==c[l]||!e.$$typeof)&&!o(e[c[l]],i[c[l]]))return!1;return!0}return e!=e&&i!=i}e.exports=function(e,t){try{return o(e,t)}catch(n){if((n.message||"").match(/stack|recursion/i))return console.warn("react-fast-compare cannot handle circular refs"),!1;throw n}}},405:(e,t,n)=>{"use strict";n.d(t,{B6:()=>H,ql:()=>J});var 
a=n(7294),r=n(5697),o=n.n(r),i=n(9590),s=n.n(i),l=n(1143),c=n.n(l),u=n(6774),d=n.n(u);function f(){return f=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var a in n)Object.prototype.hasOwnProperty.call(n,a)&&(e[a]=n[a])}return e},f.apply(this,arguments)}function p(e,t){e.prototype=Object.create(t.prototype),e.prototype.constructor=e,m(e,t)}function m(e,t){return m=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e},m(e,t)}function h(e,t){if(null==e)return{};var n,a,r={},o=Object.keys(e);for(a=0;a<o.length;a++)t.indexOf(n=o[a])>=0||(r[n]=e[n]);return r}var g={BASE:"base",BODY:"body",HEAD:"head",HTML:"html",LINK:"link",META:"meta",NOSCRIPT:"noscript",SCRIPT:"script",STYLE:"style",TITLE:"title",FRAGMENT:"Symbol(react.fragment)"},_={rel:["amphtml","canonical","alternate"]},b={type:["application/ld+json"]},k={charset:"",name:["robots","description"],property:["og:type","og:title","og:url","og:image","og:image:alt","og:description","twitter:url","twitter:title","twitter:description","twitter:image","twitter:image:alt","twitter:card","twitter:site"]},v=Object.keys(g).map((function(e){return g[e]})),y={accesskey:"accessKey",charset:"charSet",class:"className",contenteditable:"contentEditable",contextmenu:"contextMenu","http-equiv":"httpEquiv",itemprop:"itemProp",tabindex:"tabIndex"},w=Object.keys(y).reduce((function(e,t){return e[y[t]]=t,e}),{}),S=function(e,t){for(var n=e.length-1;n>=0;n-=1){var a=e[n];if(Object.prototype.hasOwnProperty.call(a,t))return a[t]}return null},E=function(e){var t=S(e,g.TITLE),n=S(e,"titleTemplate");if(Array.isArray(t)&&(t=t.join("")),n&&t)return n.replace(/%s/g,(function(){return t}));var a=S(e,"defaultTitle");return t||a||void 0},x=function(e){return S(e,"onChangeClientState")||function(){}},C=function(e,t){return t.filter((function(t){return void 0!==t[e]})).map((function(t){return t[e]})).reduce((function(e,t){return f({},e,t)}),{})},T=function(e,t){return t.filter((function(e){return void 
0!==e[g.BASE]})).map((function(e){return e[g.BASE]})).reverse().reduce((function(t,n){if(!t.length)for(var a=Object.keys(n),r=0;r<a.length;r+=1){var o=a[r].toLowerCase();if(-1!==e.indexOf(o)&&n[o])return t.concat(n)}return t}),[])},A=function(e,t,n){var a={};return n.filter((function(t){return!!Array.isArray(t[e])||(void 0!==t[e]&&console&&"function"==typeof console.warn&&console.warn("Helmet: "+e+' should be of type "Array". Instead found type "'+typeof t[e]+'"'),!1)})).map((function(t){return t[e]})).reverse().reduce((function(e,n){var r={};n.filter((function(e){for(var n,o=Object.keys(e),i=0;i<o.length;i+=1){var s=o[i],l=s.toLowerCase();-1===t.indexOf(l)||"rel"===n&&"canonical"===e[n].toLowerCase()||"rel"===l&&"stylesheet"===e[l].toLowerCase()||(n=l),-1===t.indexOf(s)||"innerHTML"!==s&&"cssText"!==s&&"itemprop"!==s||(n=s)}if(!n||!e[n])return!1;var c=e[n].toLowerCase();return a[n]||(a[n]={}),r[n]||(r[n]={}),!a[n][c]&&(r[n][c]=!0,!0)})).reverse().forEach((function(t){return e.push(t)}));for(var o=Object.keys(r),i=0;i<o.length;i+=1){var s=o[i],l=f({},a[s],r[s]);a[s]=l}return e}),[]).reverse()},G=function(e,t){if(Array.isArray(e)&&e.length)for(var n=0;n<e.length;n+=1)if(e[n][t])return!0;return!1},L=function(e){return Array.isArray(e)?e.join(""):e},N=function(e,t){return Array.isArray(e)?e.reduce((function(e,n){return function(e,t){for(var n=Object.keys(e),a=0;a<n.length;a+=1)if(t[n[a]]&&t[n[a]].includes(e[n[a]]))return!0;return!1}(n,t)?e.priority.push(n):e.default.push(n),e}),{priority:[],default:[]}):{default:e}},R=function(e,t){var n;return f({},e,((n={})[t]=void 0,n))},P=[g.NOSCRIPT,g.SCRIPT,g.STYLE],I=function(e,t){return void 0===t&&(t=!0),!1===t?String(e):String(e).replace(/&/g,"&").replace(/</g,"<").replace(/>/g,">").replace(/"/g,""").replace(/'/g,"'")},O=function(e){return Object.keys(e).reduce((function(t,n){var a=void 0!==e[n]?n+'="'+e[n]+'"':""+n;return t?t+" "+a:a}),"")},B=function(e,t){return void 
0===t&&(t={}),Object.keys(e).reduce((function(t,n){return t[y[n]||n]=e[n],t}),t)},D=function(e,t){return t.map((function(t,n){var r,o=((r={key:n})["data-rh"]=!0,r);return Object.keys(t).forEach((function(e){var n=y[e]||e;"innerHTML"===n||"cssText"===n?o.dangerouslySetInnerHTML={__html:t.innerHTML||t.cssText}:o[n]=t[e]})),a.createElement(e,o)}))},M=function(e,t,n){switch(e){case g.TITLE:return{toComponent:function(){return n=t.titleAttributes,(r={key:e=t.title})["data-rh"]=!0,o=B(n,r),[a.createElement(g.TITLE,o,e)];var e,n,r,o},toString:function(){return function(e,t,n,a){var r=O(n),o=L(t);return r?"<"+e+' data-rh="true" '+r+">"+I(o,a)+"</"+e+">":"<"+e+' data-rh="true">'+I(o,a)+"</"+e+">"}(e,t.title,t.titleAttributes,n)}};case"bodyAttributes":case"htmlAttributes":return{toComponent:function(){return B(t)},toString:function(){return O(t)}};default:return{toComponent:function(){return D(e,t)},toString:function(){return function(e,t,n){return t.reduce((function(t,a){var r=Object.keys(a).filter((function(e){return!("innerHTML"===e||"cssText"===e)})).reduce((function(e,t){var r=void 0===a[t]?t:t+'="'+I(a[t],n)+'"';return e?e+" "+r:r}),""),o=a.innerHTML||a.cssText||"",i=-1===P.indexOf(e);return t+"<"+e+' data-rh="true" '+r+(i?"/>":">"+o+"</"+e+">")}),"")}(e,t,n)}}}},F=function(e){var t=e.baseTag,n=e.bodyAttributes,a=e.encode,r=e.htmlAttributes,o=e.noscriptTags,i=e.styleTags,s=e.title,l=void 0===s?"":s,c=e.titleAttributes,u=e.linkTags,d=e.metaTags,f=e.scriptTags,p={toComponent:function(){},toString:function(){return""}};if(e.prioritizeSeoTags){var m=function(e){var t=e.linkTags,n=e.scriptTags,a=e.encode,r=N(e.metaTags,k),o=N(t,_),i=N(n,b);return{priorityMethods:{toComponent:function(){return[].concat(D(g.META,r.priority),D(g.LINK,o.priority),D(g.SCRIPT,i.priority))},toString:function(){return M(g.META,r.priority,a)+" "+M(g.LINK,o.priority,a)+" 
"+M(g.SCRIPT,i.priority,a)}},metaTags:r.default,linkTags:o.default,scriptTags:i.default}}(e);p=m.priorityMethods,u=m.linkTags,d=m.metaTags,f=m.scriptTags}return{priority:p,base:M(g.BASE,t,a),bodyAttributes:M("bodyAttributes",n,a),htmlAttributes:M("htmlAttributes",r,a),link:M(g.LINK,u,a),meta:M(g.META,d,a),noscript:M(g.NOSCRIPT,o,a),script:M(g.SCRIPT,f,a),style:M(g.STYLE,i,a),title:M(g.TITLE,{title:l,titleAttributes:c},a)}},U=[],j=function(e,t){var n=this;void 0===t&&(t="undefined"!=typeof document),this.instances=[],this.value={setHelmet:function(e){n.context.helmet=e},helmetInstances:{get:function(){return n.canUseDOM?U:n.instances},add:function(e){(n.canUseDOM?U:n.instances).push(e)},remove:function(e){var t=(n.canUseDOM?U:n.instances).indexOf(e);(n.canUseDOM?U:n.instances).splice(t,1)}}},this.context=e,this.canUseDOM=t,t||(e.helmet=F({baseTag:[],bodyAttributes:{},encodeSpecialCharacters:!0,htmlAttributes:{},linkTags:[],metaTags:[],noscriptTags:[],scriptTags:[],styleTags:[],title:"",titleAttributes:{}}))},z=a.createContext({}),K=o().shape({setHelmet:o().func,helmetInstances:o().shape({get:o().func,add:o().func,remove:o().func})}),$="undefined"!=typeof document,H=function(e){function t(n){var a;return(a=e.call(this,n)||this).helmetData=new j(a.props.context,t.canUseDOM),a}return p(t,e),t.prototype.render=function(){return a.createElement(z.Provider,{value:this.helmetData.value},this.props.children)},t}(a.Component);H.canUseDOM=$,H.propTypes={context:o().shape({helmet:o().shape()}),children:o().node.isRequired},H.defaultProps={context:{}},H.displayName="HelmetProvider";var q=function(e,t){var n,a=document.head||document.querySelector(g.HEAD),r=a.querySelectorAll(e+"[data-rh]"),o=[].slice.call(r),i=[];return t&&t.length&&t.forEach((function(t){var a=document.createElement(e);for(var r in 
t)Object.prototype.hasOwnProperty.call(t,r)&&("innerHTML"===r?a.innerHTML=t.innerHTML:"cssText"===r?a.styleSheet?a.styleSheet.cssText=t.cssText:a.appendChild(document.createTextNode(t.cssText)):a.setAttribute(r,void 0===t[r]?"":t[r]));a.setAttribute("data-rh","true"),o.some((function(e,t){return n=t,a.isEqualNode(e)}))?o.splice(n,1):i.push(a)})),o.forEach((function(e){return e.parentNode.removeChild(e)})),i.forEach((function(e){return a.appendChild(e)})),{oldTags:o,newTags:i}},Z=function(e,t){var n=document.getElementsByTagName(e)[0];if(n){for(var a=n.getAttribute("data-rh"),r=a?a.split(","):[],o=[].concat(r),i=Object.keys(t),s=0;s<i.length;s+=1){var l=i[s],c=t[l]||"";n.getAttribute(l)!==c&&n.setAttribute(l,c),-1===r.indexOf(l)&&r.push(l);var u=o.indexOf(l);-1!==u&&o.splice(u,1)}for(var d=o.length-1;d>=0;d-=1)n.removeAttribute(o[d]);r.length===o.length?n.removeAttribute("data-rh"):n.getAttribute("data-rh")!==i.join(",")&&n.setAttribute("data-rh",i.join(","))}},W=function(e,t){var n=e.baseTag,a=e.htmlAttributes,r=e.linkTags,o=e.metaTags,i=e.noscriptTags,s=e.onChangeClientState,l=e.scriptTags,c=e.styleTags,u=e.title,d=e.titleAttributes;Z(g.BODY,e.bodyAttributes),Z(g.HTML,a),function(e,t){void 0!==e&&document.title!==e&&(document.title=L(e)),Z(g.TITLE,t)}(u,d);var f={baseTag:q(g.BASE,n),linkTags:q(g.LINK,r),metaTags:q(g.META,o),noscriptTags:q(g.NOSCRIPT,i),scriptTags:q(g.SCRIPT,l),styleTags:q(g.STYLE,c)},p={},m={};Object.keys(f).forEach((function(e){var t=f[e],n=t.newTags,a=t.oldTags;n.length&&(p[e]=n),a.length&&(m[e]=f[e].oldTags)})),t&&t(),s(e,p,m)},V=null,Y=function(e){function t(){for(var t,n=arguments.length,a=new Array(n),r=0;r<n;r++)a[r]=arguments[r];return(t=e.call.apply(e,[this].concat(a))||this).rendered=!1,t}p(t,e);var n=t.prototype;return 
n.shouldComponentUpdate=function(e){return!d()(e,this.props)},n.componentDidUpdate=function(){this.emitChange()},n.componentWillUnmount=function(){this.props.context.helmetInstances.remove(this),this.emitChange()},n.emitChange=function(){var e,t,n=this.props.context,a=n.setHelmet,r=null,o=(e=n.helmetInstances.get().map((function(e){var t=f({},e.props);return delete t.context,t})),{baseTag:T(["href"],e),bodyAttributes:C("bodyAttributes",e),defer:S(e,"defer"),encode:S(e,"encodeSpecialCharacters"),htmlAttributes:C("htmlAttributes",e),linkTags:A(g.LINK,["rel","href"],e),metaTags:A(g.META,["name","charset","http-equiv","property","itemprop"],e),noscriptTags:A(g.NOSCRIPT,["innerHTML"],e),onChangeClientState:x(e),scriptTags:A(g.SCRIPT,["src","innerHTML"],e),styleTags:A(g.STYLE,["cssText"],e),title:E(e),titleAttributes:C("titleAttributes",e),prioritizeSeoTags:G(e,"prioritizeSeoTags")});H.canUseDOM?(t=o,V&&cancelAnimationFrame(V),t.defer?V=requestAnimationFrame((function(){W(t,(function(){V=null}))})):(W(t),V=null)):F&&(r=F(o)),a(r)},n.init=function(){this.rendered||(this.rendered=!0,this.props.context.helmetInstances.add(this),this.emitChange())},n.render=function(){return this.init(),null},t}(a.Component);Y.propTypes={context:K.isRequired},Y.displayName="HelmetDispatcher";var Q=["children"],X=["children"],J=function(e){function t(){return e.apply(this,arguments)||this}p(t,e);var n=t.prototype;return n.shouldComponentUpdate=function(e){return!s()(R(this.props,"helmetData"),R(e,"helmetData"))},n.mapNestedChildrenToProps=function(e,t){if(!t)return null;switch(e.type){case g.SCRIPT:case g.NOSCRIPT:return{innerHTML:t};case g.STYLE:return{cssText:t};default:throw new Error("<"+e.type+" /> elements are self-closing and can not contain children. 
Refer to our API for more information.")}},n.flattenArrayTypeChildren=function(e){var t,n=e.child,a=e.arrayTypeChildren;return f({},a,((t={})[n.type]=[].concat(a[n.type]||[],[f({},e.newChildProps,this.mapNestedChildrenToProps(n,e.nestedChildren))]),t))},n.mapObjectTypeChildren=function(e){var t,n,a=e.child,r=e.newProps,o=e.newChildProps,i=e.nestedChildren;switch(a.type){case g.TITLE:return f({},r,((t={})[a.type]=i,t.titleAttributes=f({},o),t));case g.BODY:return f({},r,{bodyAttributes:f({},o)});case g.HTML:return f({},r,{htmlAttributes:f({},o)});default:return f({},r,((n={})[a.type]=f({},o),n))}},n.mapArrayTypeChildrenToProps=function(e,t){var n=f({},t);return Object.keys(e).forEach((function(t){var a;n=f({},n,((a={})[t]=e[t],a))})),n},n.warnOnInvalidChildren=function(e,t){return c()(v.some((function(t){return e.type===t})),"function"==typeof e.type?"You may be attempting to nest <Helmet> components within each other, which is not allowed. Refer to our API for more information.":"Only elements types "+v.join(", ")+" are allowed. Helmet does not support rendering <"+e.type+"> elements. Refer to our API for more information."),c()(!t||"string"==typeof t||Array.isArray(t)&&!t.some((function(e){return"string"!=typeof e})),"Helmet expects a string as a child of <"+e.type+">. Did you forget to wrap your children in braces? 
( <"+e.type+">{``}</"+e.type+"> ) Refer to our API for more information."),!0},n.mapChildrenToProps=function(e,t){var n=this,r={};return a.Children.forEach(e,(function(e){if(e&&e.props){var a=e.props,o=a.children,i=h(a,Q),s=Object.keys(i).reduce((function(e,t){return e[w[t]||t]=i[t],e}),{}),l=e.type;switch("symbol"==typeof l?l=l.toString():n.warnOnInvalidChildren(e,o),l){case g.FRAGMENT:t=n.mapChildrenToProps(o,t);break;case g.LINK:case g.META:case g.NOSCRIPT:case g.SCRIPT:case g.STYLE:r=n.flattenArrayTypeChildren({child:e,arrayTypeChildren:r,newChildProps:s,nestedChildren:o});break;default:t=n.mapObjectTypeChildren({child:e,newProps:t,newChildProps:s,nestedChildren:o})}}})),this.mapArrayTypeChildrenToProps(r,t)},n.render=function(){var e=this.props,t=e.children,n=h(e,X),r=f({},n),o=n.helmetData;return t&&(r=this.mapChildrenToProps(t,r)),!o||o instanceof j||(o=new j(o.context,o.instances)),o?a.createElement(Y,f({},r,{context:o.value,helmetData:void 0})):a.createElement(z.Consumer,null,(function(e){return a.createElement(Y,f({},r,{context:e}))}))},t}(a.Component);J.propTypes={base:o().object,bodyAttributes:o().object,children:o().oneOfType([o().arrayOf(o().node),o().node]),defaultTitle:o().string,defer:o().bool,encodeSpecialCharacters:o().bool,htmlAttributes:o().object,link:o().arrayOf(o().object),meta:o().arrayOf(o().object),noscript:o().arrayOf(o().object),onChangeClientState:o().func,script:o().arrayOf(o().object),style:o().arrayOf(o().object),title:o().string,titleAttributes:o().object,titleTemplate:o().string,prioritizeSeoTags:o().bool,helmetData:o().object},J.defaultProps={defer:!0,encodeSpecialCharacters:!0,prioritizeSeoTags:!1},J.displayName="Helmet"},9921:(e,t)=>{"use strict";var n="function"==typeof 
Symbol&&Symbol.for,a=n?Symbol.for("react.element"):60103,r=n?Symbol.for("react.portal"):60106,o=n?Symbol.for("react.fragment"):60107,i=n?Symbol.for("react.strict_mode"):60108,s=n?Symbol.for("react.profiler"):60114,l=n?Symbol.for("react.provider"):60109,c=n?Symbol.for("react.context"):60110,u=n?Symbol.for("react.async_mode"):60111,d=n?Symbol.for("react.concurrent_mode"):60111,f=n?Symbol.for("react.forward_ref"):60112,p=n?Symbol.for("react.suspense"):60113,m=n?Symbol.for("react.suspense_list"):60120,h=n?Symbol.for("react.memo"):60115,g=n?Symbol.for("react.lazy"):60116,_=n?Symbol.for("react.block"):60121,b=n?Symbol.for("react.fundamental"):60117,k=n?Symbol.for("react.responder"):60118,v=n?Symbol.for("react.scope"):60119;function y(e){if("object"==typeof e&&null!==e){var t=e.$$typeof;switch(t){case a:switch(e=e.type){case u:case d:case o:case s:case i:case p:return e;default:switch(e=e&&e.$$typeof){case c:case f:case g:case h:case l:return e;default:return t}}case r:return t}}}function w(e){return y(e)===d}t.AsyncMode=u,t.ConcurrentMode=d,t.ContextConsumer=c,t.ContextProvider=l,t.Element=a,t.ForwardRef=f,t.Fragment=o,t.Lazy=g,t.Memo=h,t.Portal=r,t.Profiler=s,t.StrictMode=i,t.Suspense=p,t.isAsyncMode=function(e){return w(e)||y(e)===u},t.isConcurrentMode=w,t.isContextConsumer=function(e){return y(e)===c},t.isContextProvider=function(e){return y(e)===l},t.isElement=function(e){return"object"==typeof e&&null!==e&&e.$$typeof===a},t.isForwardRef=function(e){return y(e)===f},t.isFragment=function(e){return y(e)===o},t.isLazy=function(e){return y(e)===g},t.isMemo=function(e){return y(e)===h},t.isPortal=function(e){return y(e)===r},t.isProfiler=function(e){return y(e)===s},t.isStrictMode=function(e){return y(e)===i},t.isSuspense=function(e){return y(e)===p},t.isValidElementType=function(e){return"string"==typeof e||"function"==typeof e||e===o||e===d||e===s||e===i||e===p||e===m||"object"==typeof 
e&&null!==e&&(e.$$typeof===g||e.$$typeof===h||e.$$typeof===l||e.$$typeof===c||e.$$typeof===f||e.$$typeof===b||e.$$typeof===k||e.$$typeof===v||e.$$typeof===_)},t.typeOf=y},9864:(e,t,n)=>{"use strict";e.exports=n(9921)},8356:(e,t,n)=>{"use strict";function a(e,t){e.prototype=Object.create(t.prototype),e.prototype.constructor=e,e.__proto__=t}function r(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(){return i=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var a in n)Object.prototype.hasOwnProperty.call(n,a)&&(e[a]=n[a])}return e},i.apply(this,arguments)}var s=n(7294),l=n(5697),c=[],u=[];function d(e){var t=e(),n={loading:!0,loaded:null,error:null};return n.promise=t.then((function(e){return n.loading=!1,n.loaded=e,e})).catch((function(e){throw n.loading=!1,n.error=e,e})),n}function f(e){var t={loading:!1,loaded:{},error:null},n=[];try{Object.keys(e).forEach((function(a){var r=d(e[a]);r.loading?t.loading=!0:(t.loaded[a]=r.loaded,t.error=r.error),n.push(r.promise),r.promise.then((function(e){t.loaded[a]=e})).catch((function(e){t.error=e}))}))}catch(a){t.error=a}return t.promise=Promise.all(n).then((function(e){return t.loading=!1,e})).catch((function(e){throw t.loading=!1,e})),t}function p(e,t){return s.createElement((n=e)&&n.__esModule?n.default:n,t);var n}function m(e,t){var d,f;if(!t.loading)throw new Error("react-loadable requires a `loading` component");var m=i({loader:null,loading:null,delay:200,timeout:null,render:p,webpack:null,modules:null},t),h=null;function g(){return h||(h=e(m.loader)),h.promise}return c.push(g),"function"==typeof m.webpack&&u.push((function(){if((0,m.webpack)().every((function(e){return void 0!==e&&void 0!==n.m[e]})))return g()})),f=d=function(t){function n(n){var a;return 
o(r(r(a=t.call(this,n)||this)),"retry",(function(){a.setState({error:null,loading:!0,timedOut:!1}),h=e(m.loader),a._loadModule()})),g(),a.state={error:h.error,pastDelay:!1,timedOut:!1,loading:h.loading,loaded:h.loaded},a}a(n,t),n.preload=function(){return g()};var i=n.prototype;return i.UNSAFE_componentWillMount=function(){this._loadModule()},i.componentDidMount=function(){this._mounted=!0},i._loadModule=function(){var e=this;if(this.context.loadable&&Array.isArray(m.modules)&&m.modules.forEach((function(t){e.context.loadable.report(t)})),h.loading){var t=function(t){e._mounted&&e.setState(t)};"number"==typeof m.delay&&(0===m.delay?this.setState({pastDelay:!0}):this._delay=setTimeout((function(){t({pastDelay:!0})}),m.delay)),"number"==typeof m.timeout&&(this._timeout=setTimeout((function(){t({timedOut:!0})}),m.timeout));var n=function(){t({error:h.error,loaded:h.loaded,loading:h.loading}),e._clearTimeouts()};h.promise.then((function(){return n(),null})).catch((function(e){return n(),null}))}},i.componentWillUnmount=function(){this._mounted=!1,this._clearTimeouts()},i._clearTimeouts=function(){clearTimeout(this._delay),clearTimeout(this._timeout)},i.render=function(){return this.state.loading||this.state.error?s.createElement(m.loading,{isLoading:this.state.loading,pastDelay:this.state.pastDelay,timedOut:this.state.timedOut,error:this.state.error,retry:this.retry}):this.state.loaded?m.render(this.state.loaded,this.props):null},n}(s.Component),o(d,"contextTypes",{loadable:l.shape({report:l.func.isRequired})}),f}function h(e){return m(d,e)}h.Map=function(e){if("function"!=typeof e.render)throw new Error("LoadableMap requires a `render(loaded, props)` function");return m(f,e)};var g=function(e){function t(){return e.apply(this,arguments)||this}a(t,e);var n=t.prototype;return n.getChildContext=function(){return{loadable:{report:this.props.report}}},n.render=function(){return s.Children.only(this.props.children)},t}(s.Component);function _(e){for(var t=[];e.length;){var 
n=e.pop();t.push(n())}return Promise.all(t).then((function(){if(e.length)return _(e)}))}o(g,"propTypes",{report:l.func.isRequired}),o(g,"childContextTypes",{loadable:l.shape({report:l.func.isRequired}).isRequired}),h.Capture=g,h.preloadAll=function(){return new Promise((function(e,t){_(c).then(e,t)}))},h.preloadReady=function(){return new Promise((function(e,t){_(u).then(e,e)}))},e.exports=h},8790:(e,t,n)=>{"use strict";n.d(t,{H:()=>s,f:()=>i});var a=n(6550),r=n(7462),o=n(7294);function i(e,t,n){return void 0===n&&(n=[]),e.some((function(e){var r=e.path?(0,a.LX)(t,e):n.length?n[n.length-1].match:a.F0.computeRootMatch(t);return r&&(n.push({route:e,match:r}),e.routes&&i(e.routes,t,n)),r})),n}function s(e,t,n){return void 0===t&&(t={}),void 0===n&&(n={}),e?o.createElement(a.rs,n,e.map((function(e,n){return o.createElement(a.AW,{key:e.key||n,path:e.path,exact:e.exact,strict:e.strict,render:function(n){return e.render?e.render((0,r.Z)({},n,{},t,{route:e})):o.createElement(e.component,(0,r.Z)({},n,t,{route:e}))}})}))):null}},3727:(e,t,n)=>{"use strict";n.d(t,{OL:()=>k,VK:()=>u,rU:()=>g});var a=n(6550),r=n(5068),o=n(7294),i=n(9318),s=n(7462),l=n(3366),c=n(8776),u=function(e){function t(){for(var t,n=arguments.length,a=new Array(n),r=0;r<n;r++)a[r]=arguments[r];return(t=e.call.apply(e,[this].concat(a))||this).history=(0,i.lX)(t.props),t}return(0,r.Z)(t,e),t.prototype.render=function(){return o.createElement(a.F0,{history:this.history,children:this.props.children})},t}(o.Component);o.Component;var d=function(e,t){return"function"==typeof e?e(t):e},f=function(e,t){return"string"==typeof e?(0,i.ob)(e,null,null,t):e},p=function(e){return e},m=o.forwardRef;void 0===m&&(m=p);var h=m((function(e,t){var n=e.innerRef,a=e.navigate,r=e.onClick,i=(0,l.Z)(e,["innerRef","navigate","onClick"]),c=i.target,u=(0,s.Z)({},i,{onClick:function(e){try{r&&r(e)}catch(t){throw 
e.preventDefault(),t}e.defaultPrevented||0!==e.button||c&&"_self"!==c||function(e){return!!(e.metaKey||e.altKey||e.ctrlKey||e.shiftKey)}(e)||(e.preventDefault(),a())}});return u.ref=p!==m&&t||n,o.createElement("a",u)}));var g=m((function(e,t){var n=e.component,r=void 0===n?h:n,u=e.replace,g=e.to,_=e.innerRef,b=(0,l.Z)(e,["component","replace","to","innerRef"]);return o.createElement(a.s6.Consumer,null,(function(e){e||(0,c.Z)(!1);var n=e.history,a=f(d(g,e.location),e.location),l=a?n.createHref(a):"",h=(0,s.Z)({},b,{href:l,navigate:function(){var t=d(g,e.location),a=(0,i.Ep)(e.location)===(0,i.Ep)(f(t));(u||a?n.replace:n.push)(t)}});return p!==m?h.ref=t||_:h.innerRef=_,o.createElement(r,h)}))})),_=function(e){return e},b=o.forwardRef;void 0===b&&(b=_);var k=b((function(e,t){var n=e["aria-current"],r=void 0===n?"page":n,i=e.activeClassName,u=void 0===i?"active":i,p=e.activeStyle,m=e.className,h=e.exact,k=e.isActive,v=e.location,y=e.sensitive,w=e.strict,S=e.style,E=e.to,x=e.innerRef,C=(0,l.Z)(e,["aria-current","activeClassName","activeStyle","className","exact","isActive","location","sensitive","strict","style","to","innerRef"]);return o.createElement(a.s6.Consumer,null,(function(e){e||(0,c.Z)(!1);var n=v||e.location,i=f(d(E,n),n),l=i.pathname,T=l&&l.replace(/([.+*?=^!:${}()[\]|/\\])/g,"\\$1"),A=T?(0,a.LX)(n.pathname,{path:T,exact:h,sensitive:y,strict:w}):null,G=!!(k?k(A,n):A),L="function"==typeof m?m(G):m,N="function"==typeof S?S(G):S;G&&(L=function(){for(var e=arguments.length,t=new Array(e),n=0;n<e;n++)t[n]=arguments[n];return t.filter((function(e){return e})).join(" ")}(L,u),N=(0,s.Z)({},N,p));var R=(0,s.Z)({"aria-current":G&&r||null,className:L,style:N,to:i},C);return _!==b?R.ref=t||x:R.innerRef=x,o.createElement(g,R)}))}))},6550:(e,t,n)=>{"use strict";n.d(t,{AW:()=>E,F0:()=>k,LX:()=>S,TH:()=>P,k6:()=>R,rs:()=>L,s6:()=>b});var 
a=n(5068),r=n(7294),o=n(5697),i=n.n(o),s=n(9318),l=n(8776),c=n(7462),u=n(4779),d=n.n(u),f=(n(9864),n(3366)),p=(n(8679),1073741823),m="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:void 0!==n.g?n.g:{};var h=r.createContext||function(e,t){var n,o,s="__create-react-context-"+function(){var e="__global_unique_id__";return m[e]=(m[e]||0)+1}()+"__",l=function(e){function n(){for(var t,n,a,r=arguments.length,o=new Array(r),i=0;i<r;i++)o[i]=arguments[i];return(t=e.call.apply(e,[this].concat(o))||this).emitter=(n=t.props.value,a=[],{on:function(e){a.push(e)},off:function(e){a=a.filter((function(t){return t!==e}))},get:function(){return n},set:function(e,t){n=e,a.forEach((function(e){return e(n,t)}))}}),t}(0,a.Z)(n,e);var r=n.prototype;return r.getChildContext=function(){var e;return(e={})[s]=this.emitter,e},r.componentWillReceiveProps=function(e){if(this.props.value!==e.value){var n,a=this.props.value,r=e.value;((o=a)===(i=r)?0!==o||1/o==1/i:o!=o&&i!=i)?n=0:(n="function"==typeof t?t(a,r):p,0!==(n|=0)&&this.emitter.set(e.value,n))}var o,i},r.render=function(){return this.props.children},n}(r.Component);l.childContextTypes=((n={})[s]=i().object.isRequired,n);var c=function(t){function n(){for(var e,n=arguments.length,a=new Array(n),r=0;r<n;r++)a[r]=arguments[r];return(e=t.call.apply(t,[this].concat(a))||this).observedBits=void 0,e.state={value:e.getValue()},e.onUpdate=function(t,n){0!=((0|e.observedBits)&n)&&e.setState({value:e.getValue()})},e}(0,a.Z)(n,t);var r=n.prototype;return r.componentWillReceiveProps=function(e){var t=e.observedBits;this.observedBits=null==t?p:t},r.componentDidMount=function(){this.context[s]&&this.context[s].on(this.onUpdate);var e=this.props.observedBits;this.observedBits=null==e?p:e},r.componentWillUnmount=function(){this.context[s]&&this.context[s].off(this.onUpdate)},r.getValue=function(){return 
this.context[s]?this.context[s].get():e},r.render=function(){return(e=this.props.children,Array.isArray(e)?e[0]:e)(this.state.value);var e},n}(r.Component);return c.contextTypes=((o={})[s]=i().object,o),{Provider:l,Consumer:c}},g=function(e){var t=h();return t.displayName=e,t},_=g("Router-History"),b=g("Router"),k=function(e){function t(t){var n;return(n=e.call(this,t)||this).state={location:t.history.location},n._isMounted=!1,n._pendingLocation=null,t.staticContext||(n.unlisten=t.history.listen((function(e){n._pendingLocation=e}))),n}(0,a.Z)(t,e),t.computeRootMatch=function(e){return{path:"/",url:"/",params:{},isExact:"/"===e}};var n=t.prototype;return n.componentDidMount=function(){var e=this;this._isMounted=!0,this.unlisten&&this.unlisten(),this.props.staticContext||(this.unlisten=this.props.history.listen((function(t){e._isMounted&&e.setState({location:t})}))),this._pendingLocation&&this.setState({location:this._pendingLocation})},n.componentWillUnmount=function(){this.unlisten&&(this.unlisten(),this._isMounted=!1,this._pendingLocation=null)},n.render=function(){return r.createElement(b.Provider,{value:{history:this.props.history,location:this.state.location,match:t.computeRootMatch(this.state.location.pathname),staticContext:this.props.staticContext}},r.createElement(_.Provider,{children:this.props.children||null,value:this.props.history}))},t}(r.Component);r.Component;r.Component;var v={},y=1e4,w=0;function S(e,t){void 0===t&&(t={}),("string"==typeof t||Array.isArray(t))&&(t={path:t});var n=t,a=n.path,r=n.exact,o=void 0!==r&&r,i=n.strict,s=void 0!==i&&i,l=n.sensitive,c=void 0!==l&&l;return[].concat(a).reduce((function(t,n){if(!n&&""!==n)return null;if(t)return t;var a=function(e,t){var n=""+t.end+t.strict+t.sensitive,a=v[n]||(v[n]={});if(a[e])return a[e];var r=[],o={regexp:d()(e,r,t),keys:r};return w<y&&(a[e]=o,w++),o}(n,{end:o,strict:s,sensitive:c}),r=a.regexp,i=a.keys,l=r.exec(e);if(!l)return null;var u=l[0],f=l.slice(1),p=e===u;return 
o&&!p?null:{path:n,url:"/"===n&&""===u?"/":u,isExact:p,params:i.reduce((function(e,t,n){return e[t.name]=f[n],e}),{})}}),null)}var E=function(e){function t(){return e.apply(this,arguments)||this}return(0,a.Z)(t,e),t.prototype.render=function(){var e=this;return r.createElement(b.Consumer,null,(function(t){t||(0,l.Z)(!1);var n=e.props.location||t.location,a=e.props.computedMatch?e.props.computedMatch:e.props.path?S(n.pathname,e.props):t.match,o=(0,c.Z)({},t,{location:n,match:a}),i=e.props,s=i.children,u=i.component,d=i.render;return Array.isArray(s)&&function(e){return 0===r.Children.count(e)}(s)&&(s=null),r.createElement(b.Provider,{value:o},o.match?s?"function"==typeof s?s(o):s:u?r.createElement(u,o):d?d(o):null:"function"==typeof s?s(o):null)}))},t}(r.Component);function x(e){return"/"===e.charAt(0)?e:"/"+e}function C(e,t){if(!e)return t;var n=x(e);return 0!==t.pathname.indexOf(n)?t:(0,c.Z)({},t,{pathname:t.pathname.substr(n.length)})}function T(e){return"string"==typeof e?e:(0,s.Ep)(e)}function A(e){return function(){(0,l.Z)(!1)}}function G(){}r.Component;var L=function(e){function t(){return e.apply(this,arguments)||this}return(0,a.Z)(t,e),t.prototype.render=function(){var e=this;return r.createElement(b.Consumer,null,(function(t){t||(0,l.Z)(!1);var n,a,o=e.props.location||t.location;return r.Children.forEach(e.props.children,(function(e){if(null==a&&r.isValidElement(e)){n=e;var i=e.props.path||e.props.from;a=i?S(o.pathname,(0,c.Z)({},e.props,{path:i})):t.match}})),a?r.cloneElement(n,{location:o,computedMatch:a}):null}))},t}(r.Component);var N=r.useContext;function R(){return N(_)}function P(){return N(b).location}},2408:(e,t,n)=>{"use strict";var a=n(7418),r=60103,o=60106;t.Fragment=60107,t.StrictMode=60108,t.Profiler=60114;var i=60109,s=60110,l=60112;t.Suspense=60113;var c=60115,u=60116;if("function"==typeof Symbol&&Symbol.for){var 
d=Symbol.for;r=d("react.element"),o=d("react.portal"),t.Fragment=d("react.fragment"),t.StrictMode=d("react.strict_mode"),t.Profiler=d("react.profiler"),i=d("react.provider"),s=d("react.context"),l=d("react.forward_ref"),t.Suspense=d("react.suspense"),c=d("react.memo"),u=d("react.lazy")}var f="function"==typeof Symbol&&Symbol.iterator;function p(e){for(var t="https://reactjs.org/docs/error-decoder.html?invariant="+e,n=1;n<arguments.length;n++)t+="&args[]="+encodeURIComponent(arguments[n]);return"Minified React error #"+e+"; visit "+t+" for the full message or use the non-minified dev environment for full errors and additional helpful warnings."}var m={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},h={};function g(e,t,n){this.props=e,this.context=t,this.refs=h,this.updater=n||m}function _(){}function b(e,t,n){this.props=e,this.context=t,this.refs=h,this.updater=n||m}g.prototype.isReactComponent={},g.prototype.setState=function(e,t){if("object"!=typeof e&&"function"!=typeof e&&null!=e)throw Error(p(85));this.updater.enqueueSetState(this,e,t,"setState")},g.prototype.forceUpdate=function(e){this.updater.enqueueForceUpdate(this,e,"forceUpdate")},_.prototype=g.prototype;var k=b.prototype=new _;k.constructor=b,a(k,g.prototype),k.isPureReactComponent=!0;var v={current:null},y=Object.prototype.hasOwnProperty,w={key:!0,ref:!0,__self:!0,__source:!0};function S(e,t,n){var a,o={},i=null,s=null;if(null!=t)for(a in void 0!==t.ref&&(s=t.ref),void 0!==t.key&&(i=""+t.key),t)y.call(t,a)&&!w.hasOwnProperty(a)&&(o[a]=t[a]);var l=arguments.length-2;if(1===l)o.children=n;else if(1<l){for(var c=Array(l),u=0;u<l;u++)c[u]=arguments[u+2];o.children=c}if(e&&e.defaultProps)for(a in l=e.defaultProps)void 0===o[a]&&(o[a]=l[a]);return{$$typeof:r,type:e,key:i,ref:s,props:o,_owner:v.current}}function E(e){return"object"==typeof e&&null!==e&&e.$$typeof===r}var x=/\/+/g;function C(e,t){return"object"==typeof 
e&&null!==e&&null!=e.key?function(e){var t={"=":"=0",":":"=2"};return"$"+e.replace(/[=:]/g,(function(e){return t[e]}))}(""+e.key):t.toString(36)}function T(e,t,n,a,i){var s=typeof e;"undefined"!==s&&"boolean"!==s||(e=null);var l=!1;if(null===e)l=!0;else switch(s){case"string":case"number":l=!0;break;case"object":switch(e.$$typeof){case r:case o:l=!0}}if(l)return i=i(l=e),e=""===a?"."+C(l,0):a,Array.isArray(i)?(n="",null!=e&&(n=e.replace(x,"$&/")+"/"),T(i,t,n,"",(function(e){return e}))):null!=i&&(E(i)&&(i=function(e,t){return{$$typeof:r,type:e.type,key:t,ref:e.ref,props:e.props,_owner:e._owner}}(i,n+(!i.key||l&&l.key===i.key?"":(""+i.key).replace(x,"$&/")+"/")+e)),t.push(i)),1;if(l=0,a=""===a?".":a+":",Array.isArray(e))for(var c=0;c<e.length;c++){var u=a+C(s=e[c],c);l+=T(s,t,n,u,i)}else if(u=function(e){return null===e||"object"!=typeof e?null:"function"==typeof(e=f&&e[f]||e["@@iterator"])?e:null}(e),"function"==typeof u)for(e=u.call(e),c=0;!(s=e.next()).done;)l+=T(s=s.value,t,n,u=a+C(s,c++),i);else if("object"===s)throw t=""+e,Error(p(31,"[object Object]"===t?"object with keys {"+Object.keys(e).join(", ")+"}":t));return l}function A(e,t,n){if(null==e)return e;var a=[],r=0;return T(e,a,"","",(function(e){return t.call(n,e,r++)})),a}function G(e){if(-1===e._status){var t=e._result;t=t(),e._status=0,e._result=t,t.then((function(t){0===e._status&&(t=t.default,e._status=1,e._result=t)}),(function(t){0===e._status&&(e._status=2,e._result=t)}))}if(1===e._status)return e._result;throw e._result}var L={current:null};function N(){var e=L.current;if(null===e)throw Error(p(321));return e}var R={ReactCurrentDispatcher:L,ReactCurrentBatchConfig:{transition:0},ReactCurrentOwner:v,IsSomeRendererActing:{current:!1},assign:a};t.Children={map:A,forEach:function(e,t,n){A(e,(function(){t.apply(this,arguments)}),n)},count:function(e){var t=0;return A(e,(function(){t++})),t},toArray:function(e){return A(e,(function(e){return e}))||[]},only:function(e){if(!E(e))throw Error(p(143));return 
e}},t.Component=g,t.PureComponent=b,t.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED=R,t.cloneElement=function(e,t,n){if(null==e)throw Error(p(267,e));var o=a({},e.props),i=e.key,s=e.ref,l=e._owner;if(null!=t){if(void 0!==t.ref&&(s=t.ref,l=v.current),void 0!==t.key&&(i=""+t.key),e.type&&e.type.defaultProps)var c=e.type.defaultProps;for(u in t)y.call(t,u)&&!w.hasOwnProperty(u)&&(o[u]=void 0===t[u]&&void 0!==c?c[u]:t[u])}var u=arguments.length-2;if(1===u)o.children=n;else if(1<u){c=Array(u);for(var d=0;d<u;d++)c[d]=arguments[d+2];o.children=c}return{$$typeof:r,type:e.type,key:i,ref:s,props:o,_owner:l}},t.createContext=function(e,t){return void 0===t&&(t=null),(e={$$typeof:s,_calculateChangedBits:t,_currentValue:e,_currentValue2:e,_threadCount:0,Provider:null,Consumer:null}).Provider={$$typeof:i,_context:e},e.Consumer=e},t.createElement=S,t.createFactory=function(e){var t=S.bind(null,e);return t.type=e,t},t.createRef=function(){return{current:null}},t.forwardRef=function(e){return{$$typeof:l,render:e}},t.isValidElement=E,t.lazy=function(e){return{$$typeof:u,_payload:{_status:-1,_result:e},_init:G}},t.memo=function(e,t){return{$$typeof:c,type:e,compare:void 0===t?null:t}},t.useCallback=function(e,t){return N().useCallback(e,t)},t.useContext=function(e,t){return N().useContext(e,t)},t.useDebugValue=function(){},t.useEffect=function(e,t){return N().useEffect(e,t)},t.useImperativeHandle=function(e,t,n){return N().useImperativeHandle(e,t,n)},t.useLayoutEffect=function(e,t){return N().useLayoutEffect(e,t)},t.useMemo=function(e,t){return N().useMemo(e,t)},t.useReducer=function(e,t,n){return N().useReducer(e,t,n)},t.useRef=function(e){return N().useRef(e)},t.useState=function(e){return N().useState(e)},t.version="17.0.2"},7294:(e,t,n)=>{"use strict";e.exports=n(2408)},53:(e,t)=>{"use strict";var n,a,r,o;if("object"==typeof performance&&"function"==typeof performance.now){var i=performance;t.unstable_now=function(){return i.now()}}else{var 
s=Date,l=s.now();t.unstable_now=function(){return s.now()-l}}if("undefined"==typeof window||"function"!=typeof MessageChannel){var c=null,u=null,d=function(){if(null!==c)try{var e=t.unstable_now();c(!0,e),c=null}catch(n){throw setTimeout(d,0),n}};n=function(e){null!==c?setTimeout(n,0,e):(c=e,setTimeout(d,0))},a=function(e,t){u=setTimeout(e,t)},r=function(){clearTimeout(u)},t.unstable_shouldYield=function(){return!1},o=t.unstable_forceFrameRate=function(){}}else{var f=window.setTimeout,p=window.clearTimeout;if("undefined"!=typeof console){var m=window.cancelAnimationFrame;"function"!=typeof window.requestAnimationFrame&&console.error("This browser doesn't support requestAnimationFrame. Make sure that you load a polyfill in older browsers. https://reactjs.org/link/react-polyfills"),"function"!=typeof m&&console.error("This browser doesn't support cancelAnimationFrame. Make sure that you load a polyfill in older browsers. https://reactjs.org/link/react-polyfills")}var h=!1,g=null,_=-1,b=5,k=0;t.unstable_shouldYield=function(){return t.unstable_now()>=k},o=function(){},t.unstable_forceFrameRate=function(e){0>e||125<e?console.error("forceFrameRate takes a positive int between 0 and 125, forcing frame rates higher than 125 fps is not supported"):b=0<e?Math.floor(1e3/e):5};var v=new MessageChannel,y=v.port2;v.port1.onmessage=function(){if(null!==g){var e=t.unstable_now();k=e+b;try{g(!0,e)?y.postMessage(null):(h=!1,g=null)}catch(n){throw y.postMessage(null),n}}else h=!1},n=function(e){g=e,h||(h=!0,y.postMessage(null))},a=function(e,n){_=f((function(){e(t.unstable_now())}),n)},r=function(){p(_),_=-1}}function w(e,t){var n=e.length;e.push(t);e:for(;;){var a=n-1>>>1,r=e[a];if(!(void 0!==r&&0<x(r,t)))break e;e[a]=t,e[n]=r,n=a}}function S(e){return void 0===(e=e[0])?null:e}function E(e){var t=e[0];if(void 0!==t){var n=e.pop();if(n!==t){e[0]=n;e:for(var a=0,r=e.length;a<r;){var o=2*(a+1)-1,i=e[o],s=o+1,l=e[s];if(void 0!==i&&0>x(i,n))void 
0!==l&&0>x(l,i)?(e[a]=l,e[s]=n,a=s):(e[a]=i,e[o]=n,a=o);else{if(!(void 0!==l&&0>x(l,n)))break e;e[a]=l,e[s]=n,a=s}}}return t}return null}function x(e,t){var n=e.sortIndex-t.sortIndex;return 0!==n?n:e.id-t.id}var C=[],T=[],A=1,G=null,L=3,N=!1,R=!1,P=!1;function I(e){for(var t=S(T);null!==t;){if(null===t.callback)E(T);else{if(!(t.startTime<=e))break;E(T),t.sortIndex=t.expirationTime,w(C,t)}t=S(T)}}function O(e){if(P=!1,I(e),!R)if(null!==S(C))R=!0,n(B);else{var t=S(T);null!==t&&a(O,t.startTime-e)}}function B(e,n){R=!1,P&&(P=!1,r()),N=!0;var o=L;try{for(I(n),G=S(C);null!==G&&(!(G.expirationTime>n)||e&&!t.unstable_shouldYield());){var i=G.callback;if("function"==typeof i){G.callback=null,L=G.priorityLevel;var s=i(G.expirationTime<=n);n=t.unstable_now(),"function"==typeof s?G.callback=s:G===S(C)&&E(C),I(n)}else E(C);G=S(C)}if(null!==G)var l=!0;else{var c=S(T);null!==c&&a(O,c.startTime-n),l=!1}return l}finally{G=null,L=o,N=!1}}var D=o;t.unstable_IdlePriority=5,t.unstable_ImmediatePriority=1,t.unstable_LowPriority=4,t.unstable_NormalPriority=3,t.unstable_Profiling=null,t.unstable_UserBlockingPriority=2,t.unstable_cancelCallback=function(e){e.callback=null},t.unstable_continueExecution=function(){R||N||(R=!0,n(B))},t.unstable_getCurrentPriorityLevel=function(){return L},t.unstable_getFirstCallbackNode=function(){return S(C)},t.unstable_next=function(e){switch(L){case 1:case 2:case 3:var t=3;break;default:t=L}var n=L;L=t;try{return e()}finally{L=n}},t.unstable_pauseExecution=function(){},t.unstable_requestPaint=D,t.unstable_runWithPriority=function(e,t){switch(e){case 1:case 2:case 3:case 4:case 5:break;default:e=3}var n=L;L=e;try{return t()}finally{L=n}},t.unstable_scheduleCallback=function(e,o,i){var s=t.unstable_now();switch("object"==typeof i&&null!==i?i="number"==typeof(i=i.delay)&&0<i?s+i:s:i=s,e){case 1:var l=-1;break;case 2:l=250;break;case 5:l=1073741823;break;case 4:l=1e4;break;default:l=5e3}return 
e={id:A++,callback:o,priorityLevel:e,startTime:i,expirationTime:l=i+l,sortIndex:-1},i>s?(e.sortIndex=i,w(T,e),null===S(C)&&e===S(T)&&(P?r():P=!0,a(O,i-s))):(e.sortIndex=l,w(C,e),R||N||(R=!0,n(B))),e},t.unstable_wrapCallback=function(e){var t=L;return function(){var n=L;L=t;try{return e.apply(this,arguments)}finally{L=n}}}},3840:(e,t,n)=>{"use strict";e.exports=n(53)},6774:e=>{e.exports=function(e,t,n,a){var r=n?n.call(a,e,t):void 0;if(void 0!==r)return!!r;if(e===t)return!0;if("object"!=typeof e||!e||"object"!=typeof t||!t)return!1;var o=Object.keys(e),i=Object.keys(t);if(o.length!==i.length)return!1;for(var s=Object.prototype.hasOwnProperty.bind(t),l=0;l<o.length;l++){var c=o[l];if(!s(c))return!1;var u=e[c],d=t[c];if(!1===(r=n?n.call(a,u,d,c):void 0)||void 0===r&&u!==d)return!1}return!0}},3250:(e,t,n)=>{"use strict";var a=n(7294);var r="function"==typeof Object.is?Object.is:function(e,t){return e===t&&(0!==e||1/e==1/t)||e!=e&&t!=t},o=a.useState,i=a.useEffect,s=a.useLayoutEffect,l=a.useDebugValue;function c(e){var t=e.getSnapshot;e=e.value;try{var n=t();return!r(e,n)}catch(a){return!0}}var u="undefined"==typeof window||void 0===window.document||void 0===window.document.createElement?function(e,t){return t()}:function(e,t){var n=t(),a=o({inst:{value:n,getSnapshot:t}}),r=a[0].inst,u=a[1];return s((function(){r.value=n,r.getSnapshot=t,c(r)&&u({inst:r})}),[e,n,t]),i((function(){return c(r)&&u({inst:r}),e((function(){c(r)&&u({inst:r})}))}),[e]),l(n),n};t.useSyncExternalStore=void 0!==a.useSyncExternalStore?a.useSyncExternalStore:u},1688:(e,t,n)=>{"use strict";e.exports=n(3250)},6809:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>a});const a={title:"FastKafka",tagline:"Effortless Kafka integration for web services",customFields:{description:"Powerful and easy-to-use open-source framework for building asynchronous web services that interact with 
Kafka."},favicon:"img/AIRT_icon_blue.svg",url:"https://fastkafka.airt.ai",baseUrl:"/",organizationName:"airt",projectName:"fastkafka",trailingSlash:!0,onBrokenLinks:"warn",onBrokenMarkdownLinks:"warn",i18n:{defaultLocale:"en",locales:["en"],path:"i18n",localeConfigs:{}},presets:[["classic",{docs:{sidebarPath:"/home/runner/work/fastkafka/fastkafka/docusaurus/sidebars.js",exclude:["**/*.test.{js,jsx,ts,tsx}","**/__tests__/**"],versions:{current:{label:"dev \ud83d\udea7"}}},blog:{showReadingTime:!0},theme:{customCss:"/home/runner/work/fastkafka/fastkafka/docusaurus/src/css/custom.css"},gtag:{trackingID:"G-WLMWPELHMB"}}]],themeConfig:{algolia:{appId:"EHYNSIUGMY",apiKey:"2680cd13947844a00a5a657b959e6211",indexName:"fastkafka-airt",contextualSearch:!0,searchParameters:{},searchPagePath:"search"},image:"https://opengraph.githubassets.com/1671805243.560327/airtai/fastkafka",navbar:{title:"airt",logo:{alt:"airt logo",src:"img/AIRT_icon_blue.svg",href:"https://airt.ai",target:"_blank"},items:[{to:"/",html:'<div><img src="/img/home-icon.svg"><p>FastKafka</p></div>',position:"right",className:"fastkafka-home"},{type:"docsVersionDropdown",position:"right",dropdownActiveClassDisabled:!0,dropdownItemsBefore:[],dropdownItemsAfter:[]},{type:"docSidebar",sidebarId:"tutorialSidebar",position:"right",label:"Docs"},{type:"html",position:"right",className:"github-stars",value:'<iframe src="https://ghbtns.com/github-btn.html?user=airtai&repo=fastkafka&type=star&count=true&size=large" frameborder="0" scrolling="0" width="170" height="30" title="GitHub"></iframe>'},{href:"https://discord.gg/CJWmYpyFbc",position:"right",className:"header-discord-link","aria-label":"Discord Link"},{to:"/",html:'<div><img src="/img/home-icon.svg"></div>',position:"right",className:"fastkafka-home-mobile"}],hideOnScroll:!1},footer:{style:"dark",links:[{title:"COMMUNITY",items:[{html:'\n <a class="footer-discord-link" href="https://discord.gg/CJWmYpyFbc" target="_blank" rel="noreferrer noopener" 
aria-label="Discord link"></a>\n '},{html:'\n <a class="footer-github-link" href="https://github.com/airtai" target="_blank" rel="noreferrer noopener" aria-label="Github link"></a>\n '},{html:'\n <a class="footer-twitter-link" href="https://twitter.com/airt_AI" target="_blank" rel="noreferrer noopener" aria-label="Twitter link"></a>\n '},{html:'\n <a class="footer-facebook-link" href="https://www.facebook.com/airt.ai.api/" target="_blank" rel="noreferrer noopener" aria-label="Facebook link"></a>\n '},{html:'\n <a class="footer-linkedin-link" href="https://www.linkedin.com/company/airt-ai/" target="_blank" rel="noreferrer noopener" aria-label="LinkedIn link"></a>\n '}]},{title:"EXPLORE DOCS",items:[{label:"Get Started",to:"/docs"}]},{title:"EXPLORE MORE",items:[{label:"News",to:"https://airt.ai/news"},{label:"About Us",to:"https://airt.ai/about-us"},{label:"Company information",to:"https://airt.ai/company-information"}]}],copyright:"\xa9 2023 airt. All rights reserved."},prism:{theme:{plain:{color:"#393A34",backgroundColor:"#f6f8fa"},styles:[{types:["comment","prolog","doctype","cdata"],style:{color:"#999988",fontStyle:"italic"}},{types:["namespace"],style:{opacity:.7}},{types:["string","attr-value"],style:{color:"#e3116c"}},{types:["punctuation","operator"],style:{color:"#393A34"}},{types:["entity","url","symbol","number","boolean","variable","constant","property","regex","inserted"],style:{color:"#36acaa"}},{types:["atrule","keyword","attr-name","selector"],style:{color:"#00a4db"}},{types:["function","deleted","tag"],style:{color:"#d73a49"}},{types:["function-variable"],style:{color:"#6f42c1"}},{types:["tag","selector","keyword"],style:{color:"#00009f"}},{types:["title"],style:{color:"#0550AE",fontWeight:"bold"}},{types:["parameter"],style:{color:"#953800"}},{types:["boolean","rule","color","number","constant","property"],style:{color:"#005CC5"}},{types:["atrule","tag"],style:{color:"#22863A"}},{types:["script"],style:{color:"#24292E"}},{types:["operator","unit","r
ule"],style:{color:"#D73A49"}},{types:["font-matter","string","attr-value"],style:{color:"#C6105F"}},{types:["class-name"],style:{color:"#116329"}},{types:["attr-name"],style:{color:"#0550AE"}},{types:["keyword"],style:{color:"#CF222E"}},{types:["function"],style:{color:"#8250DF"}},{types:["selector"],style:{color:"#6F42C1"}},{types:["variable"],style:{color:"#E36209"}},{types:["comment"],style:{color:"#6B6B6B"}},{types:["builtin"],style:{color:"#005CC5"}}]},darkTheme:{plain:{color:"#D4D4D4",backgroundColor:"#212121"},styles:[{types:["prolog"],style:{color:"rgb(0, 0, 128)"}},{types:["comment"],style:{color:"rgb(106, 153, 85)"}},{types:["builtin","changed","keyword","interpolation-punctuation"],style:{color:"rgb(86, 156, 214)"}},{types:["number","inserted"],style:{color:"rgb(181, 206, 168)"}},{types:["constant"],style:{color:"rgb(100, 102, 149)"}},{types:["attr-name","variable"],style:{color:"rgb(156, 220, 254)"}},{types:["deleted","string","attr-value","template-punctuation"],style:{color:"rgb(206, 145, 120)"}},{types:["selector"],style:{color:"rgb(215, 186, 125)"}},{types:["tag"],style:{color:"rgb(78, 201, 176)"}},{types:["tag"],languages:["markup"],style:{color:"rgb(86, 156, 214)"}},{types:["punctuation","operator"],style:{color:"rgb(212, 212, 212)"}},{types:["punctuation"],languages:["markup"],style:{color:"#808080"}},{types:["function"],style:{color:"rgb(220, 220, 170)"}},{types:["class-name"],style:{color:"rgb(78, 201, 176)"}},{types:["char"],style:{color:"rgb(209, 105, 
105)"}},{types:["title"],style:{color:"#569CD6",fontWeight:"bold"}},{types:["property","parameter"],style:{color:"#9CDCFE"}},{types:["script"],style:{color:"#D4D4D4"}},{types:["boolean","arrow","atrule","tag"],style:{color:"#569CD6"}},{types:["number","color","unit"],style:{color:"#B5CEA8"}},{types:["font-matter"],style:{color:"#CE9178"}},{types:["keyword","rule"],style:{color:"#C586C0"}},{types:["regex"],style:{color:"#D16969"}},{types:["maybe-class-name"],style:{color:"#4EC9B0"}},{types:["constant"],style:{color:"#4FC1FF"}}]},additionalLanguages:[],magicComments:[{className:"theme-code-block-highlighted-line",line:"highlight-next-line",block:{start:"highlight-start",end:"highlight-end"}}]},colorMode:{defaultMode:"light",disableSwitch:!1,respectPrefersColorScheme:!1},docs:{versionPersistence:"localStorage",sidebar:{hideable:!1,autoCollapseCategories:!1}},metadata:[],tableOfContents:{minHeadingLevel:2,maxHeadingLevel:3}},baseUrlIssueBanner:!0,onDuplicateRoutes:"warn",staticDirectories:["static"],plugins:[],themes:[],scripts:[],headTags:[],stylesheets:[],clientModules:[],titleDelimiter:"|",noIndex:!1,markdown:{mermaid:!1}}},7462:(e,t,n)=>{"use strict";function a(){return a=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var a in n)Object.prototype.hasOwnProperty.call(n,a)&&(e[a]=n[a])}return e},a.apply(this,arguments)}n.d(t,{Z:()=>a})},5068:(e,t,n)=>{"use strict";function a(e,t){return a=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(e,t){return e.__proto__=t,e},a(e,t)}function r(e,t){e.prototype=Object.create(t.prototype),e.prototype.constructor=e,a(e,t)}n.d(t,{Z:()=>r})},3366:(e,t,n)=>{"use strict";function a(e,t){if(null==e)return{};var n,a,r={},o=Object.keys(e);for(a=0;a<o.length;a++)n=o[a],t.indexOf(n)>=0||(r[n]=e[n]);return r}n.d(t,{Z:()=>a})},8776:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var a=!0,r="Invariant failed";function o(e,t){if(!e){if(a)throw new Error(r);var n="function"==typeof 
t?t():t,o=n?"".concat(r,": ").concat(n):r;throw new Error(o)}}},7529:e=>{"use strict";e.exports={}},6887:e=>{"use strict";e.exports=JSON.parse('{"/demo/-745":{"__comp":"980c25d7","__context":{"plugin":"aacd1d40"},"config":"5e9f5e1a"},"/search/-b54":{"__comp":"1a4e3797","__context":{"plugin":"dde1ff6e"}},"/docs/0.5.0/-1a6":{"__comp":"1be78505","__context":{"plugin":"ca2bf8a3"},"versionMetadata":"0030fd86"},"/docs/0.5.0/-f05":{"__comp":"17896441","content":"f2954f34"},"/docs/0.5.0/api/fastkafka/-aba":{"__comp":"17896441","content":"2ae68e65"},"/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/-cfa":{"__comp":"17896441","content":"81bf77fc"},"/docs/0.5.0/api/fastkafka/KafkaEvent/-80f":{"__comp":"17896441","content":"2afa602b"},"/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/-cdb":{"__comp":"17896441","content":"61386b8d"},"/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker/-dd7":{"__comp":"17896441","content":"4e5074e6"},"/docs/0.5.0/api/fastkafka/testing/Tester/-b37":{"__comp":"17896441","content":"d9ce81b2"},"/docs/0.5.0/CHANGELOG/-3a0":{"__comp":"17896441","content":"b24805c2"},"/docs/0.5.0/cli/fastkafka/-aea":{"__comp":"17896441","content":"cd19d898"},"/docs/0.5.0/cli/run_fastkafka_server_process/-35c":{"__comp":"17896441","content":"cac45e38"},"/docs/0.5.0/guides/Guide_00_FastKafka_Demo/-a1e":{"__comp":"17896441","content":"222e7c49"},"/docs/0.5.0/guides/Guide_01_Intro/-a79":{"__comp":"17896441","content":"a03cde8f"},"/docs/0.5.0/guides/Guide_02_First_Steps/-2c5":{"__comp":"17896441","content":"bfac6a8d"},"/docs/0.5.0/guides/Guide_03_Authentication/-8a7":{"__comp":"17896441","content":"b638c32b"},"/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow/-d3b":{"__comp":"17896441","content":"409b7aa0"},"/docs/0.5.0/guides/Guide_05_Lifespan_Handler/-e7c":{"__comp":"17896441","content":"b1b6a961"},"/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka/-880":{"__comp":"17896441","content":"fc8a86b2"},"/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_Fa
stKafka/-b72":{"__comp":"17896441","content":"847c12c2"},"/docs/0.5.0/guides/Guide_11_Consumes_Basics/-683":{"__comp":"17896441","content":"414d4a37"},"/docs/0.5.0/guides/Guide_21_Produces_Basics/-cd5":{"__comp":"17896441","content":"47ac2e75"},"/docs/0.5.0/guides/Guide_22_Partition_Keys/-098":{"__comp":"17896441","content":"111ae602"},"/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/-693":{"__comp":"17896441","content":"a4cbee7f"},"/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/-bcc":{"__comp":"17896441","content":"81b6783d"},"/docs/0.6.0/-8d8":{"__comp":"1be78505","__context":{"plugin":"ca2bf8a3"},"versionMetadata":"58f10d9f"},"/docs/0.6.0/-22e":{"__comp":"17896441","content":"4ace981f"},"/docs/0.6.0/api/fastkafka/-8f8":{"__comp":"17896441","content":"e7ab2684"},"/docs/0.6.0/api/fastkafka/encoder/avro_decoder/-54e":{"__comp":"17896441","content":"2b2faa0a"},"/docs/0.6.0/api/fastkafka/encoder/avro_encoder/-837":{"__comp":"17896441","content":"6c174e6d"},"/docs/0.6.0/api/fastkafka/encoder/AvroBase/-535":{"__comp":"17896441","content":"6e7b1bc6"},"/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/-885":{"__comp":"17896441","content":"a07fb1cb"},"/docs/0.6.0/api/fastkafka/encoder/json_decoder/-e6b":{"__comp":"17896441","content":"f39642a1"},"/docs/0.6.0/api/fastkafka/encoder/json_encoder/-e82":{"__comp":"17896441","content":"c602cd44"},"/docs/0.6.0/api/fastkafka/EventMetadata/-541":{"__comp":"17896441","content":"46d2add0"},"/docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor/-e6e":{"__comp":"17896441","content":"9fda8563"},"/docs/0.6.0/api/fastkafka/executors/SequentialExecutor/-846":{"__comp":"17896441","content":"d241d4ef"},"/docs/0.6.0/api/fastkafka/KafkaEvent/-acc":{"__comp":"17896441","content":"09cca5f2"},"/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/-26d":{"__comp":"17896441","content":"4f8e8160"},"/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker/-f47":{"__comp":"17896441","content":"7b589963"},"/docs/0.6.0/api/fastk
afka/testing/Tester/-21f":{"__comp":"17896441","content":"cd59f9ef"},"/docs/0.6.0/CHANGELOG/-e01":{"__comp":"17896441","content":"5534c352"},"/docs/0.6.0/cli/fastkafka/-c61":{"__comp":"17896441","content":"38a44003"},"/docs/0.6.0/cli/run_fastkafka_server_process/-616":{"__comp":"17896441","content":"dbc0f590"},"/docs/0.6.0/CONTRIBUTING/-5d5":{"__comp":"17896441","content":"beaba6c2"},"/docs/0.6.0/guides/Guide_00_FastKafka_Demo/-d41":{"__comp":"17896441","content":"ac02e102"},"/docs/0.6.0/guides/Guide_01_Intro/-73d":{"__comp":"17896441","content":"a686ca68"},"/docs/0.6.0/guides/Guide_02_First_Steps/-956":{"__comp":"17896441","content":"e109b3ff"},"/docs/0.6.0/guides/Guide_03_Authentication/-5e3":{"__comp":"17896441","content":"2c797d78"},"/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow/-3c0":{"__comp":"17896441","content":"99912bf6"},"/docs/0.6.0/guides/Guide_05_Lifespan_Handler/-3e4":{"__comp":"17896441","content":"62ff7ec9"},"/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka/-62b":{"__comp":"17896441","content":"e8ae88bc"},"/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/-1fd":{"__comp":"17896441","content":"6c450cd6"},"/docs/0.6.0/guides/Guide_11_Consumes_Basics/-301":{"__comp":"17896441","content":"f7e229b3"},"/docs/0.6.0/guides/Guide_21_Produces_Basics/-771":{"__comp":"17896441","content":"002d14fa"},"/docs/0.6.0/guides/Guide_22_Partition_Keys/-fc9":{"__comp":"17896441","content":"5cf0f698"},"/docs/0.6.0/guides/Guide_23_Batch_Producing/-d0a":{"__comp":"17896441","content":"35d7f647"},"/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/-093":{"__comp":"17896441","content":"1674a630"},"/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/-efc":{"__comp":"17896441","content":"4a2f1dfa"},"/docs/0.6.0/LICENSE/-204":{"__comp":"17896441","content":"1efdbea1"},"/docs/0.7.0/-6a8":{"__comp":"1be78505","__context":{"plugin":"ca2bf8a3"},"versionMetadata":"2fe15297"},"/docs/0.7.0/-54f":{"__comp":"17896441","content":"15f1310d"
},"/docs/0.7.0/api/fastkafka/-790":{"__comp":"17896441","content":"4d11873e"},"/docs/0.7.0/api/fastkafka/encoder/avro_decoder/-cda":{"__comp":"17896441","content":"5584c47d"},"/docs/0.7.0/api/fastkafka/encoder/avro_encoder/-ba0":{"__comp":"17896441","content":"ba3b9f5c"},"/docs/0.7.0/api/fastkafka/encoder/AvroBase/-8c7":{"__comp":"17896441","content":"e56c502c"},"/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic/-455":{"__comp":"17896441","content":"6d9c0b04"},"/docs/0.7.0/api/fastkafka/encoder/json_decoder/-41f":{"__comp":"17896441","content":"d2282d9e"},"/docs/0.7.0/api/fastkafka/encoder/json_encoder/-810":{"__comp":"17896441","content":"1f1765ab"},"/docs/0.7.0/api/fastkafka/EventMetadata/-841":{"__comp":"17896441","content":"c4a14462"},"/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor/-1ec":{"__comp":"17896441","content":"a17dbf83"},"/docs/0.7.0/api/fastkafka/executors/SequentialExecutor/-5dc":{"__comp":"17896441","content":"5a11a8c6"},"/docs/0.7.0/api/fastkafka/KafkaEvent/-ce6":{"__comp":"17896441","content":"b91921d6"},"/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker/-771":{"__comp":"17896441","content":"0ff0556c"},"/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker/-164":{"__comp":"17896441","content":"d73efefc"},"/docs/0.7.0/api/fastkafka/testing/Tester/-ece":{"__comp":"17896441","content":"fd2e624b"},"/docs/0.7.0/CHANGELOG/-14a":{"__comp":"17896441","content":"1957b43a"},"/docs/0.7.0/cli/fastkafka/-bed":{"__comp":"17896441","content":"1244450e"},"/docs/0.7.0/cli/run_fastkafka_server_process/-117":{"__comp":"17896441","content":"23c607c1"},"/docs/0.7.0/CONTRIBUTING/-61b":{"__comp":"17896441","content":"6af17b1d"},"/docs/0.7.0/guides/Guide_00_FastKafka_Demo/-52d":{"__comp":"17896441","content":"7ae5d564"},"/docs/0.7.0/guides/Guide_01_Intro/-801":{"__comp":"17896441","content":"14056c2c"},"/docs/0.7.0/guides/Guide_02_First_Steps/-aa1":{"__comp":"17896441","content":"d2af0b95"},"/docs/0.7.0/guides/Guide_03_Authentication/-375":{"__comp":"17896441"
,"content":"06acf88d"},"/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow/-292":{"__comp":"17896441","content":"29105828"},"/docs/0.7.0/guides/Guide_05_Lifespan_Handler/-62d":{"__comp":"17896441","content":"d67a4111"},"/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka/-3c6":{"__comp":"17896441","content":"74e1ba0d"},"/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/-a34":{"__comp":"17896441","content":"3087bb2d"},"/docs/0.7.0/guides/Guide_11_Consumes_Basics/-7f0":{"__comp":"17896441","content":"68d54528"},"/docs/0.7.0/guides/Guide_12_Batch_Consuming/-f88":{"__comp":"17896441","content":"456c5d82"},"/docs/0.7.0/guides/Guide_21_Produces_Basics/-1ee":{"__comp":"17896441","content":"68c835af"},"/docs/0.7.0/guides/Guide_22_Partition_Keys/-9e0":{"__comp":"17896441","content":"fb969bb3"},"/docs/0.7.0/guides/Guide_23_Batch_Producing/-36c":{"__comp":"17896441","content":"aa946361"},"/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters/-7d0":{"__comp":"17896441","content":"0d766b78"},"/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/-01a":{"__comp":"17896441","content":"14f7f42b"},"/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/-764":{"__comp":"17896441","content":"b7f60777"},"/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/-052":{"__comp":"17896441","content":"1d4d4e46"},"/docs/0.7.0/LICENSE/-200":{"__comp":"17896441","content":"d2b827bd"},"/docs/0.7.1/-64f":{"__comp":"1be78505","__context":{"plugin":"ca2bf8a3"},"versionMetadata":"e4d0ad4d"},"/docs/0.7.1/-3ad":{"__comp":"17896441","content":"a7914a5c"},"/docs/0.7.1/api/fastkafka/-78c":{"__comp":"17896441","content":"7245ce96"},"/docs/0.7.1/api/fastkafka/encoder/avro_decoder/-a8b":{"__comp":"17896441","content":"e97b3564"},"/docs/0.7.1/api/fastkafka/encoder/avro_encoder/-a81":{"__comp":"17896441","content":"f35e2aba"},"/docs/0.7.1/api/fastkafka/encoder/AvroBase/-25b":{"__comp":"17896441","content":"11c86bb5"},"/docs/0.7.1/api/fastkafka/encoder/a
vsc_to_pydantic/-10c":{"__comp":"17896441","content":"6cafb666"},"/docs/0.7.1/api/fastkafka/encoder/json_decoder/-91e":{"__comp":"17896441","content":"e6eb5527"},"/docs/0.7.1/api/fastkafka/encoder/json_encoder/-920":{"__comp":"17896441","content":"fe73cc84"},"/docs/0.7.1/api/fastkafka/EventMetadata/-e00":{"__comp":"17896441","content":"fdc5233c"},"/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor/-7f7":{"__comp":"17896441","content":"6dbdf8e8"},"/docs/0.7.1/api/fastkafka/executors/SequentialExecutor/-e27":{"__comp":"17896441","content":"ebc40d40"},"/docs/0.7.1/api/fastkafka/KafkaEvent/-27f":{"__comp":"17896441","content":"20f8c1fd"},"/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker/-438":{"__comp":"17896441","content":"a4055066"},"/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker/-13a":{"__comp":"17896441","content":"205a719b"},"/docs/0.7.1/api/fastkafka/testing/Tester/-a2a":{"__comp":"17896441","content":"be529d37"},"/docs/0.7.1/CHANGELOG/-92e":{"__comp":"17896441","content":"94d2eef0"},"/docs/0.7.1/cli/fastkafka/-6d7":{"__comp":"17896441","content":"381a15bc"},"/docs/0.7.1/cli/run_fastkafka_server_process/-569":{"__comp":"17896441","content":"15aa5f44"},"/docs/0.7.1/CONTRIBUTING/-a22":{"__comp":"17896441","content":"97a352ae"},"/docs/0.7.1/guides/Guide_00_FastKafka_Demo/-ebb":{"__comp":"17896441","content":"a80d168f"},"/docs/0.7.1/guides/Guide_01_Intro/-028":{"__comp":"17896441","content":"9defa5b7"},"/docs/0.7.1/guides/Guide_02_First_Steps/-3a5":{"__comp":"17896441","content":"ee2e0a62"},"/docs/0.7.1/guides/Guide_03_Authentication/-3f8":{"__comp":"17896441","content":"d35204c3"},"/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow/-8ed":{"__comp":"17896441","content":"9980ea0e"},"/docs/0.7.1/guides/Guide_05_Lifespan_Handler/-1bd":{"__comp":"17896441","content":"647303d6"},"/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka/-36f":{"__comp":"17896441","content":"c16f65ec"},"/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/-bf5"
:{"__comp":"17896441","content":"65ab9689"},"/docs/0.7.1/guides/Guide_11_Consumes_Basics/-25a":{"__comp":"17896441","content":"8804eadc"},"/docs/0.7.1/guides/Guide_12_Batch_Consuming/-32a":{"__comp":"17896441","content":"c9eeccbf"},"/docs/0.7.1/guides/Guide_21_Produces_Basics/-f0c":{"__comp":"17896441","content":"4c4d6ef6"},"/docs/0.7.1/guides/Guide_22_Partition_Keys/-821":{"__comp":"17896441","content":"2bc15a09"},"/docs/0.7.1/guides/Guide_23_Batch_Producing/-243":{"__comp":"17896441","content":"9af63d42"},"/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters/-545":{"__comp":"17896441","content":"9fc8d1d9"},"/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka/-359":{"__comp":"17896441","content":"ae1efb81"},"/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka/-228":{"__comp":"17896441","content":"6b76d411"},"/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/-c3b":{"__comp":"17896441","content":"1f0a946f"},"/docs/0.7.1/LICENSE/-edd":{"__comp":"17896441","content":"fff0a46d"},"/docs/next/-9c0":{"__comp":"1be78505","__context":{"plugin":"ca2bf8a3"},"versionMetadata":"935f2afb"},"/docs/next/-acf":{"__comp":"17896441","content":"c377a04b"},"/docs/next/api/fastkafka/-323":{"__comp":"17896441","content":"4d517c40"},"/docs/next/api/fastkafka/encoder/avro_decoder/-f4a":{"__comp":"17896441","content":"898ba646"},"/docs/next/api/fastkafka/encoder/avro_encoder/-332":{"__comp":"17896441","content":"0a79db1f"},"/docs/next/api/fastkafka/encoder/AvroBase/-7fe":{"__comp":"17896441","content":"1128ab4d"},"/docs/next/api/fastkafka/encoder/avsc_to_pydantic/-252":{"__comp":"17896441","content":"982d0b04"},"/docs/next/api/fastkafka/encoder/json_decoder/-555":{"__comp":"17896441","content":"99bfca7e"},"/docs/next/api/fastkafka/encoder/json_encoder/-11a":{"__comp":"17896441","content":"69a9729f"},"/docs/next/api/fastkafka/EventMetadata/-489":{"__comp":"17896441","content":"b70bee8d"},"/docs/next/api/fastkafka/executors/DynamicTaskExecutor/-c05":
{"__comp":"17896441","content":"5347168a"},"/docs/next/api/fastkafka/executors/SequentialExecutor/-93f":{"__comp":"17896441","content":"de2621c2"},"/docs/next/api/fastkafka/KafkaEvent/-b8e":{"__comp":"17896441","content":"e333f535"},"/docs/next/api/fastkafka/testing/ApacheKafkaBroker/-304":{"__comp":"17896441","content":"514a13f6"},"/docs/next/api/fastkafka/testing/LocalRedpandaBroker/-7d3":{"__comp":"17896441","content":"e323208f"},"/docs/next/api/fastkafka/testing/Tester/-3eb":{"__comp":"17896441","content":"04d4af82"},"/docs/next/CHANGELOG/-53a":{"__comp":"17896441","content":"ca36df4d"},"/docs/next/cli/fastkafka/-552":{"__comp":"17896441","content":"83ec613f"},"/docs/next/cli/run_fastkafka_server_process/-c49":{"__comp":"17896441","content":"d7dfec52"},"/docs/next/CONTRIBUTING/-a7d":{"__comp":"17896441","content":"8d193b98"},"/docs/next/guides/Guide_00_FastKafka_Demo/-c8c":{"__comp":"17896441","content":"9440fd12"},"/docs/next/guides/Guide_01_Intro/-89f":{"__comp":"17896441","content":"516ebbd1"},"/docs/next/guides/Guide_02_First_Steps/-bf4":{"__comp":"17896441","content":"036db789"},"/docs/next/guides/Guide_03_Authentication/-735":{"__comp":"17896441","content":"40415b6c"},"/docs/next/guides/Guide_04_Github_Actions_Workflow/-7fb":{"__comp":"17896441","content":"7b4381d3"},"/docs/next/guides/Guide_05_Lifespan_Handler/-256":{"__comp":"17896441","content":"0fb5d45b"},"/docs/next/guides/Guide_06_Benchmarking_FastKafka/-ee6":{"__comp":"17896441","content":"e968e69e"},"/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/-c00":{"__comp":"17896441","content":"d40fb48f"},"/docs/next/guides/Guide_11_Consumes_Basics/-376":{"__comp":"17896441","content":"10df9fdc"},"/docs/next/guides/Guide_12_Batch_Consuming/-2d0":{"__comp":"17896441","content":"e1584d63"},"/docs/next/guides/Guide_21_Produces_Basics/-8b9":{"__comp":"17896441","content":"d87f7f29"},"/docs/next/guides/Guide_22_Partition_Keys/-26d":{"__comp":"17896441","content":"7107eb83"},"/docs/next/gu
ides/Guide_23_Batch_Producing/-a58":{"__comp":"17896441","content":"f8edae29"},"/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters/-fa5":{"__comp":"17896441","content":"060147ec"},"/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka/-a93":{"__comp":"17896441","content":"87f59f37"},"/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka/-630":{"__comp":"17896441","content":"48199270"},"/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/-b52":{"__comp":"17896441","content":"14111b0c"},"/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka/-a1d":{"__comp":"17896441","content":"065bbf18"},"/docs/next/LICENSE/-491":{"__comp":"17896441","content":"8ad68633"},"/docs/-e68":{"__comp":"1be78505","__context":{"plugin":"ca2bf8a3"},"versionMetadata":"c95b781b"},"/docs/-2f0":{"__comp":"17896441","content":"4a9e4762"},"/docs/api/fastkafka/-209":{"__comp":"17896441","content":"a6c229c0"},"/docs/api/fastkafka/encoder/avro_decoder/-422":{"__comp":"17896441","content":"80f42d74"},"/docs/api/fastkafka/encoder/avro_encoder/-67e":{"__comp":"17896441","content":"b9d0db8e"},"/docs/api/fastkafka/encoder/AvroBase/-719":{"__comp":"17896441","content":"c0e3ff8b"},"/docs/api/fastkafka/encoder/avsc_to_pydantic/-2ad":{"__comp":"17896441","content":"a5b090b0"},"/docs/api/fastkafka/encoder/json_decoder/-ba2":{"__comp":"17896441","content":"478692f7"},"/docs/api/fastkafka/encoder/json_encoder/-d66":{"__comp":"17896441","content":"196c63a7"},"/docs/api/fastkafka/EventMetadata/-e9d":{"__comp":"17896441","content":"5527e5b7"},"/docs/api/fastkafka/executors/DynamicTaskExecutor/-200":{"__comp":"17896441","content":"16e87abe"},"/docs/api/fastkafka/executors/SequentialExecutor/-acb":{"__comp":"17896441","content":"2e96a196"},"/docs/api/fastkafka/KafkaEvent/-7f5":{"__comp":"17896441","content":"f2aaa4e5"},"/docs/api/fastkafka/testing/ApacheKafkaBroker/-edd":{"__comp":"17896441","content":"2622e95a"},"/docs/api/fastkafka/testing/LocalRedpandaBroker/-dc7":{"__co
mp":"17896441","content":"243cddb9"},"/docs/api/fastkafka/testing/Tester/-79a":{"__comp":"17896441","content":"c248ee7e"},"/docs/CHANGELOG/-6a5":{"__comp":"17896441","content":"8ff5d7ba"},"/docs/cli/fastkafka/-632":{"__comp":"17896441","content":"dc75700c"},"/docs/cli/run_fastkafka_server_process/-1ba":{"__comp":"17896441","content":"87b29f85"},"/docs/CONTRIBUTING/-6c9":{"__comp":"17896441","content":"c192c597"},"/docs/guides/Guide_00_FastKafka_Demo/-414":{"__comp":"17896441","content":"58b4829f"},"/docs/guides/Guide_01_Intro/-3e6":{"__comp":"17896441","content":"a624bde7"},"/docs/guides/Guide_02_First_Steps/-db7":{"__comp":"17896441","content":"232ab88c"},"/docs/guides/Guide_03_Authentication/-6a9":{"__comp":"17896441","content":"ba9d536d"},"/docs/guides/Guide_04_Github_Actions_Workflow/-c61":{"__comp":"17896441","content":"13bdfbad"},"/docs/guides/Guide_05_Lifespan_Handler/-bf4":{"__comp":"17896441","content":"0582779b"},"/docs/guides/Guide_06_Benchmarking_FastKafka/-b72":{"__comp":"17896441","content":"8c27608b"},"/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/-bf6":{"__comp":"17896441","content":"4a00fd3a"},"/docs/guides/Guide_11_Consumes_Basics/-7c1":{"__comp":"17896441","content":"5300e879"},"/docs/guides/Guide_12_Batch_Consuming/-9ac":{"__comp":"17896441","content":"a9ab9f8f"},"/docs/guides/Guide_21_Produces_Basics/-dd3":{"__comp":"17896441","content":"75af10bd"},"/docs/guides/Guide_22_Partition_Keys/-61b":{"__comp":"17896441","content":"d9bd3427"},"/docs/guides/Guide_23_Batch_Producing/-83b":{"__comp":"17896441","content":"1187a271"},"/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters/-2ef":{"__comp":"17896441","content":"d0381ee6"},"/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka/-8da":{"__comp":"17896441","content":"a34ed3b2"},"/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka/-a0f":{"__comp":"17896441","content":"c3d488fa"},"/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/-966":{"__comp":"17896441","conte
nt":"99d969f2"},"/docs/LICENSE/-a59":{"__comp":"17896441","content":"0d927e9a"},"/-50b":{"__comp":"c4f5d8e4","__context":{"plugin":"aacd1d40"},"config":"5e9f5e1a"}}')}},e=>{e.O(0,[532],(()=>{return t=9383,e(e.s=t);var t}));e.O()}]); \ No newline at end of file diff --git a/assets/js/main.e19c364c.js.LICENSE.txt b/assets/js/main.e19c364c.js.LICENSE.txt new file mode 100644 index 0000000..eb75d69 --- /dev/null +++ b/assets/js/main.e19c364c.js.LICENSE.txt @@ -0,0 +1,63 @@ +/* +object-assign +(c) Sindre Sorhus +@license MIT +*/ + +/* NProgress, (c) 2013, 2014 Rico Sta. Cruz - http://ricostacruz.com/nprogress + * @license MIT */ + +/** + * @license React + * use-sync-external-store-shim.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * Prism: Lightweight, robust, elegant syntax highlighting + * + * @license MIT <https://opensource.org/licenses/MIT> + * @author Lea Verou <https://lea.verou.me> + * @namespace + * @public + */ + +/** @license React v0.20.2 + * scheduler.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** @license React v16.13.1 + * react-is.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** @license React v17.0.2 + * react-dom.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** @license React v17.0.2 + * react.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ diff --git a/assets/js/runtime~main.bd2548d2.js b/assets/js/runtime~main.bd2548d2.js new file mode 100644 index 0000000..875f775 --- /dev/null +++ b/assets/js/runtime~main.bd2548d2.js @@ -0,0 +1 @@ +(()=>{"use strict";var e,a,f,d,b,c={},t={};function r(e){var a=t[e];if(void 0!==a)return a.exports;var f=t[e]={id:e,loaded:!1,exports:{}};return c[e].call(f.exports,f,f.exports,r),f.loaded=!0,f.exports}r.m=c,r.c=t,e=[],r.O=(a,f,d,b)=>{if(!f){var c=1/0;for(i=0;i<e.length;i++){f=e[i][0],d=e[i][1],b=e[i][2];for(var t=!0,o=0;o<f.length;o++)(!1&b||c>=b)&&Object.keys(r.O).every((e=>r.O[e](f[o])))?f.splice(o--,1):(t=!1,b<c&&(c=b));if(t){e.splice(i--,1);var n=d();void 0!==n&&(a=n)}}return a}b=b||0;for(var i=e.length;i>0&&e[i-1][2]>b;i--)e[i]=e[i-1];e[i]=[f,d,b]},r.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return r.d(a,{a:a}),a},f=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,r.t=function(e,d){if(1&d&&(e=this(e)),8&d)return e;if("object"==typeof e&&e){if(4&d&&e.__esModule)return e;if(16&d&&"function"==typeof e.then)return e}var b=Object.create(null);r.r(b);var c={};a=a||[null,f({}),f([]),f(f)];for(var t=2&d&&e;"object"==typeof t&&!~a.indexOf(t);t=f(t))Object.getOwnPropertyNames(t).forEach((a=>c[a]=()=>e[a]));return c.default=()=>e,r.d(b,c),b},r.d=(e,a)=>{for(var f in 
a)r.o(a,f)&&!r.o(e,f)&&Object.defineProperty(e,f,{enumerable:!0,get:a[f]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce(((a,f)=>(r.f[f](e,a),a)),[])),r.u=e=>"assets/js/"+({29:"09cca5f2",53:"935f2afb",88:"2ae68e65",92:"7245ce96",99:"06acf88d",162:"a17dbf83",298:"de2621c2",309:"ae1efb81",383:"4a2f1dfa",424:"5347168a",554:"647303d6",604:"d35204c3",608:"b70bee8d",721:"58b4829f",733:"87f59f37",836:"ee2e0a62",904:"4d11873e",917:"ca36df4d",920:"e8ae88bc",953:"0582779b",984:"ebc40d40",1122:"e4d0ad4d",1159:"c4a14462",1195:"4d517c40",1202:"e56c502c",1244:"065bbf18",1267:"d2af0b95",1294:"243cddb9",1312:"4e5074e6",1358:"81bf77fc",1377:"4a9e4762",1384:"ba9d536d",1506:"9980ea0e",1604:"9440fd12",1612:"11c86bb5",1616:"b24805c2",1673:"9fc8d1d9",1674:"46d2add0",1707:"69a9729f",1733:"222e7c49",1753:"cd59f9ef",1783:"75af10bd",1790:"847c12c2",1856:"14111b0c",1887:"0030fd86",1939:"456c5d82",1984:"c192c597",1998:"6dbdf8e8",2e3:"68c835af",2049:"d40fb48f",2111:"2fe15297",2353:"d73efefc",2368:"c0e3ff8b",2399:"8ad68633",2473:"980c25d7",2493:"58f10d9f",2706:"4c4d6ef6",2732:"0a79db1f",2777:"a34ed3b2",2798:"c16f65ec",3023:"beaba6c2",3033:"be529d37",3042:"6d9c0b04",3051:"15aa5f44",3111:"cac45e38",3114:"5527e5b7",3196:"c3d488fa",3251:"fe73cc84",3331:"f7e229b3",3374:"e968e69e",3623:"2e96a196",3645:"1f1765ab",3671:"0fb5d45b",3679:"b7f60777",3684:"47ac2e75",3690:"f35e2aba",3696:"7b4381d3",3725:"a80d168f",3747:"c9eeccbf",3776:"e333f535",3800:"514a13f6",3814:"9af63d42",3927:"1f0a946f",3979:"a9ab9f8f",4018:"a4055066",4029:"a5b090b0",4039:"e109b3ff",4095:"516ebbd1",4098:"83ec613f",4168:"b1b6a961",4172:"99bfca7e",4195:"c4f5d8e4",4358:"ba3b9f5c",4379:"111ae602",4404:"196c63a7",4413:"6e7b1bc6",4457:"7107eb83",4497:"b638c32b",4559:"4a00fd3a",4641:"fff0a46d",4779:"d2282d9e",4789:"a03cde8f",4842:"a7914a5c",4874:"d2b827bd",4884:"5cf0f698",4886:"dc75700c",5012:"7ae5d564",5041:"48199270",5045:"6af17b1d",5050:"2b2faa0a",5144:"a4cbee7f",5171:"aa946361",5198:"1187a271",5252:"1674a630",5339:"d241d4ef",5340:"262
2e95a",5375:"61386b8d",5394:"aacd1d40",5412:"dbc0f590",5414:"205a719b",5430:"a07fb1cb",5439:"97a352ae",5547:"14f7f42b",5623:"1d4d4e46",5625:"62ff7ec9",5628:"7b589963",5684:"1957b43a",5746:"5a11a8c6",5775:"04d4af82",5805:"1244450e",5845:"74e1ba0d",5896:"13bdfbad",5955:"9defa5b7",5997:"15f1310d",6005:"99d969f2",6064:"bfac6a8d",6133:"14056c2c",6147:"e7ab2684",6308:"3087bb2d",6492:"0d766b78",6527:"80f42d74",6590:"036db789",6623:"409b7aa0",6704:"ca2bf8a3",6791:"4ace981f",6803:"d9ce81b2",6862:"c602cd44",6964:"e6eb5527",6971:"c377a04b",6993:"dde1ff6e",7011:"2c797d78",7055:"9fda8563",7058:"40415b6c",7083:"6b76d411",7100:"232ab88c",7132:"16e87abe",7229:"5534c352",7368:"10df9fdc",7408:"f2954f34",7473:"414d4a37",7505:"8d193b98",7562:"478692f7",7600:"5300e879",7602:"060147ec",7624:"0ff0556c",7639:"a686ca68",7683:"4f8e8160",7710:"f8edae29",7750:"6c450cd6",7773:"a6c229c0",7786:"65ab9689",7795:"8ff5d7ba",7881:"68d54528",7883:"6cafb666",7886:"381a15bc",7918:"17896441",7920:"1a4e3797",8064:"002d14fa",8110:"87b29f85",8119:"5584c47d",8120:"d0381ee6",8197:"fc8a86b2",8215:"20f8c1fd",8270:"b91921d6",8279:"898ba646",8308:"d67a4111",8457:"d7dfec52",8468:"982d0b04",8565:"cd19d898",8653:"2afa602b",8674:"38a44003",8775:"35d7f647",8796:"d9bd3427",8823:"a624bde7",8851:"8c27608b",8861:"f2aaa4e5",8888:"81b6783d",8908:"2bc15a09",8914:"1efdbea1",8927:"c248ee7e",8945:"e1584d63",8952:"1128ab4d",9050:"29105828",9069:"d87f7f29",9107:"f39642a1",9170:"b9d0db8e",9469:"99912bf6",9498:"8804eadc",9511:"6c174e6d",9514:"1be78505",9571:"fb969bb3",9652:"0d927e9a",9723:"94d2eef0",9724:"e97b3564",9777:"e323208f",9794:"fdc5233c",9810:"c95b781b",9840:"23c607c1",9851:"fd2e624b",9942:"ac02e102"}[e]||e)+"."+{29:"a6e2df67",53:"c2b93cd4",88:"2da2affc",92:"2ad33dfd",99:"54a41cbf",162:"57177a4c",298:"5348d14a",309:"ef2d54c6",383:"5bc1686b",424:"4d2a41c3",554:"98160b7c",604:"f3736ac4",608:"45dd4d58",721:"f9857ac8",733:"6b9e11c6",836:"ea183c41",904:"e7adb799",917:"bdcd738d",920:"3ee8c934",953:"aec163df",984:"75714ad4",1030:"
7d064482",1122:"f67d40a9",1159:"17d186df",1195:"eadb0bc3",1202:"7f764e63",1244:"851ffe9a",1267:"61dc7ae0",1294:"83dc99e3",1312:"b0d53ffd",1358:"e86cf07e",1377:"9c7fce8d",1384:"e1fae3ca",1506:"b9be0f2e",1604:"93ff0ee3",1612:"38ea69ae",1616:"5ca38736",1673:"4bcec99f",1674:"08babb68",1707:"c03205b8",1733:"9c8fb23c",1753:"54f4605b",1783:"f657f121",1790:"0dcae720",1856:"d1701b86",1887:"cda43b2b",1939:"1356cd36",1984:"2da137e0",1998:"90843a66",2e3:"1e1795fe",2049:"0dd8a669",2111:"5d5bccf7",2353:"77589437",2368:"4348bcbf",2399:"50914cab",2473:"6573b859",2493:"49ed8c8f",2706:"8a27cfcd",2732:"58fed57a",2777:"a8b3a7f3",2798:"04e07d23",3023:"94675e66",3033:"6621d11d",3042:"bac51284",3051:"44966933",3111:"ffd6c350",3114:"a5884eda",3196:"7534f536",3251:"50771324",3331:"1f12b6b5",3374:"7145b4d3",3623:"47765f9b",3645:"bbeac427",3671:"a1df13f6",3679:"7085564d",3684:"3ab08e6a",3690:"396a2a4d",3696:"483f4f09",3725:"2854157b",3747:"fa9d425c",3776:"1e236bcc",3800:"826a6d2f",3814:"bb0edfcf",3927:"fcc5a3d2",3979:"bdfd8b0f",4018:"ba2a7e48",4029:"a4364faf",4039:"16e87717",4095:"b21303ee",4098:"6af4e02b",4168:"74208c3e",4172:"4ea0922d",4195:"b78e8a3e",4358:"009751df",4379:"55a30aac",4404:"f818c35d",4413:"ac852278",4457:"9b621a01",4497:"ab27dab2",4559:"497faf2c",4641:"a0ecfadf",4779:"ad5da899",4789:"cdce6d67",4842:"64fb6d59",4874:"868ae315",4884:"9f573412",4886:"84688a9d",4972:"0680bd7d",5012:"6ea40e89",5041:"467a59ac",5045:"d647c97a",5050:"bb2930b4",5144:"d067a9ae",5171:"9a9d22e0",5198:"6b0af2cc",5252:"97d72d03",5339:"a87ac3a3",5340:"0b16f714",5375:"6fd0979f",5394:"c4e40d4c",5412:"302c834a",5414:"fba55762",5430:"ef0f7575",5439:"33fe6fad",5547:"27993dc9",5623:"53235266",5625:"ec600e31",5628:"32aefe0f",5684:"daf8773c",5746:"882f537d",5775:"f9ae366c",5805:"79248efa",5845:"3a0419d8",5896:"a7bd1580",5955:"dbb12a65",5997:"015cd27f",6005:"d89c6b4c",6064:"33d1069c",6133:"8964dc73",6147:"124a91a6",6308:"d34d4c8e",6492:"42ab4cf9",6527:"392d78fb",6590:"b29b3dc1",6623:"924e3028",6704:"b136f6a8",6780:"b
8374b3c",6791:"70fbc63c",6803:"de1fc440",6862:"c0889991",6945:"a9a2d87d",6964:"890dde78",6971:"e32687a6",6993:"936ed816",7011:"264327d1",7055:"2e6d9fba",7058:"d29ab3ac",7083:"86c99a48",7100:"c37b9f14",7132:"02193372",7229:"d3e76e0e",7368:"6033a07f",7408:"e942d91a",7473:"2900f4a9",7505:"faf1a7fe",7562:"e9e2abf4",7600:"8a8dc6e5",7602:"e48d0a0e",7624:"0b867a3c",7639:"cc915e73",7683:"b9904ca2",7710:"e9acd7b2",7750:"73253a7e",7773:"135ade25",7786:"c83f172b",7795:"c28181ba",7881:"f4b4a695",7883:"6d7d0081",7886:"fecfc237",7918:"8961bab4",7920:"a52196f1",8064:"b553e184",8110:"960f3d83",8119:"98379ebd",8120:"20083f4a",8197:"1a0d2a4d",8215:"f55a6b8d",8270:"198b04b6",8279:"9c58f3c1",8308:"aa568655",8457:"b5cc8811",8468:"f756f754",8565:"67faad21",8653:"f892f0f7",8674:"5bed27ee",8775:"eeaa1f4e",8796:"e1f1f230",8823:"bd5c926b",8851:"c4322928",8861:"df8937bd",8888:"a996001b",8894:"674c4c01",8908:"b35b76c3",8914:"365af71b",8927:"56274b20",8945:"e755620a",8952:"708c7b33",9050:"3c1f0326",9069:"8b85c627",9107:"dcbef318",9170:"9fbaa8a9",9469:"1a9c1ee9",9498:"73cda2d1",9511:"40d06062",9514:"228b1716",9571:"8d0af030",9652:"ee128e68",9723:"b68cf51d",9724:"ead957c7",9777:"514adfe6",9794:"73a1ac59",9810:"54e317b5",9840:"55560318",9851:"b7df8a31",9942:"06e01491"}[e]+".js",r.miniCssF=e=>{},r.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),r.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),d={},b="fastkafka:",r.l=(e,a,f,c)=>{if(d[e])d[e].push(a);else{var t,o;if(void 0!==f)for(var n=document.getElementsByTagName("script"),i=0;i<n.length;i++){var u=n[i];if(u.getAttribute("src")==e||u.getAttribute("data-webpack")==b+f){t=u;break}}t||(o=!0,(t=document.createElement("script")).charset="utf-8",t.timeout=120,r.nc&&t.setAttribute("nonce",r.nc),t.setAttribute("data-webpack",b+f),t.src=e),d[e]=[a];var l=(a,f)=>{t.onerror=t.onload=null,clearTimeout(s);var b=d[e];if(delete 
d[e],t.parentNode&&t.parentNode.removeChild(t),b&&b.forEach((e=>e(f))),a)return a(f)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:t}),12e4);t.onerror=l.bind(null,t.onerror),t.onload=l.bind(null,t.onload),o&&document.head.appendChild(t)}},r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.p="/",r.gca=function(e){return e={17896441:"7918",29105828:"9050",48199270:"5041","09cca5f2":"29","935f2afb":"53","2ae68e65":"88","7245ce96":"92","06acf88d":"99",a17dbf83:"162",de2621c2:"298",ae1efb81:"309","4a2f1dfa":"383","5347168a":"424","647303d6":"554",d35204c3:"604",b70bee8d:"608","58b4829f":"721","87f59f37":"733",ee2e0a62:"836","4d11873e":"904",ca36df4d:"917",e8ae88bc:"920","0582779b":"953",ebc40d40:"984",e4d0ad4d:"1122",c4a14462:"1159","4d517c40":"1195",e56c502c:"1202","065bbf18":"1244",d2af0b95:"1267","243cddb9":"1294","4e5074e6":"1312","81bf77fc":"1358","4a9e4762":"1377",ba9d536d:"1384","9980ea0e":"1506","9440fd12":"1604","11c86bb5":"1612",b24805c2:"1616","9fc8d1d9":"1673","46d2add0":"1674","69a9729f":"1707","222e7c49":"1733",cd59f9ef:"1753","75af10bd":"1783","847c12c2":"1790","14111b0c":"1856","0030fd86":"1887","456c5d82":"1939",c192c597:"1984","6dbdf8e8":"1998","68c835af":"2000",d40fb48f:"2049","2fe15297":"2111",d73efefc:"2353",c0e3ff8b:"2368","8ad68633":"2399","980c25d7":"2473","58f10d9f":"2493","4c4d6ef6":"2706","0a79db1f":"2732",a34ed3b2:"2777",c16f65ec:"2798",beaba6c2:"3023",be529d37:"3033","6d9c0b04":"3042","15aa5f44":"3051",cac45e38:"3111","5527e5b7":"3114",c3d488fa:"3196",fe73cc84:"3251",f7e229b3:"3331",e968e69e:"3374","2e96a196":"3623","1f1765ab":"3645","0fb5d45b":"3671",b7f60777:"3679","47ac2e75":"3684",f35e2aba:"3690","7b4381d3":"3696",a80d168f:"3725",c9eeccbf:"3747",e333f535:"3776","514a13f6":"3800","9af63d42":"3814","1f0a946f":"3927",a9ab9f8f:"3979",a4055066:"4018",a5b090b0:"4029",e109b3ff:"4039","516ebbd1":"4095","83ec613f":"4098",
b1b6a961:"4168","99bfca7e":"4172",c4f5d8e4:"4195",ba3b9f5c:"4358","111ae602":"4379","196c63a7":"4404","6e7b1bc6":"4413","7107eb83":"4457",b638c32b:"4497","4a00fd3a":"4559",fff0a46d:"4641",d2282d9e:"4779",a03cde8f:"4789",a7914a5c:"4842",d2b827bd:"4874","5cf0f698":"4884",dc75700c:"4886","7ae5d564":"5012","6af17b1d":"5045","2b2faa0a":"5050",a4cbee7f:"5144",aa946361:"5171","1187a271":"5198","1674a630":"5252",d241d4ef:"5339","2622e95a":"5340","61386b8d":"5375",aacd1d40:"5394",dbc0f590:"5412","205a719b":"5414",a07fb1cb:"5430","97a352ae":"5439","14f7f42b":"5547","1d4d4e46":"5623","62ff7ec9":"5625","7b589963":"5628","1957b43a":"5684","5a11a8c6":"5746","04d4af82":"5775","1244450e":"5805","74e1ba0d":"5845","13bdfbad":"5896","9defa5b7":"5955","15f1310d":"5997","99d969f2":"6005",bfac6a8d:"6064","14056c2c":"6133",e7ab2684:"6147","3087bb2d":"6308","0d766b78":"6492","80f42d74":"6527","036db789":"6590","409b7aa0":"6623",ca2bf8a3:"6704","4ace981f":"6791",d9ce81b2:"6803",c602cd44:"6862",e6eb5527:"6964",c377a04b:"6971",dde1ff6e:"6993","2c797d78":"7011","9fda8563":"7055","40415b6c":"7058","6b76d411":"7083","232ab88c":"7100","16e87abe":"7132","5534c352":"7229","10df9fdc":"7368",f2954f34:"7408","414d4a37":"7473","8d193b98":"7505","478692f7":"7562","5300e879":"7600","060147ec":"7602","0ff0556c":"7624",a686ca68:"7639","4f8e8160":"7683",f8edae29:"7710","6c450cd6":"7750",a6c229c0:"7773","65ab9689":"7786","8ff5d7ba":"7795","68d54528":"7881","6cafb666":"7883","381a15bc":"7886","1a4e3797":"7920","002d14fa":"8064","87b29f85":"8110","5584c47d":"8119",d0381ee6:"8120",fc8a86b2:"8197","20f8c1fd":"8215",b91921d6:"8270","898ba646":"8279",d67a4111:"8308",d7dfec52:"8457","982d0b04":"8468",cd19d898:"8565","2afa602b":"8653","38a44003":"8674","35d7f647":"8775",d9bd3427:"8796",a624bde7:"8823","8c27608b":"8851",f2aaa4e5:"8861","81b6783d":"8888","2bc15a09":"8908","1efdbea1":"8914",c248ee7e:"8927",e1584d63:"8945","1128ab4d":"8952",d87f7f29:"9069",f39642a1:"9107",b9d0db8e:"9170","99912bf6":"9469","8804eadc":"94
98","6c174e6d":"9511","1be78505":"9514",fb969bb3:"9571","0d927e9a":"9652","94d2eef0":"9723",e97b3564:"9724",e323208f:"9777",fdc5233c:"9794",c95b781b:"9810","23c607c1":"9840",fd2e624b:"9851",ac02e102:"9942"}[e]||e,r.p+r.u(e)},(()=>{var e={1303:0,532:0};r.f.j=(a,f)=>{var d=r.o(e,a)?e[a]:void 0;if(0!==d)if(d)f.push(d[2]);else if(/^(1303|532)$/.test(a))e[a]=0;else{var b=new Promise(((f,b)=>d=e[a]=[f,b]));f.push(d[2]=b);var c=r.p+r.u(a),t=new Error;r.l(c,(f=>{if(r.o(e,a)&&(0!==(d=e[a])&&(e[a]=void 0),d)){var b=f&&("load"===f.type?"missing":f.type),c=f&&f.target&&f.target.src;t.message="Loading chunk "+a+" failed.\n("+b+": "+c+")",t.name="ChunkLoadError",t.type=b,t.request=c,d[1](t)}}),"chunk-"+a,a)}},r.O.j=a=>0===e[a];var a=(a,f)=>{var d,b,c=f[0],t=f[1],o=f[2],n=0;if(c.some((a=>0!==e[a]))){for(d in t)r.o(t,d)&&(r.m[d]=t[d]);if(o)var i=o(r)}for(a&&a(f);n<c.length;n++)b=c[n],r.o(e,b)&&e[b]&&e[b][0](),e[b]=0;return r.O(i)},f=self.webpackChunkfastkafka=self.webpackChunkfastkafka||[];f.forEach(a.bind(null,0)),f.push=a.bind(null,f.push.bind(f))})()})(); \ No newline at end of file diff --git a/demo/index.html b/demo/index.html new file mode 100644 index 0000000..048163f --- /dev/null +++ b/demo/index.html @@ -0,0 +1,32 @@ +<!doctype html> +<html lang="en" dir="ltr" class="plugin-pages plugin-id-default"> +<head> +<meta charset="UTF-8"> +<meta name="generator" content="Docusaurus v2.4.0"> +<title data-rh="true">Demo | FastKafka + + + + + + + + + + +
+
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/CHANGELOG/index.html b/docs/0.5.0/CHANGELOG/index.html new file mode 100644 index 0000000..f1b8b43 --- /dev/null +++ b/docs/0.5.0/CHANGELOG/index.html @@ -0,0 +1,32 @@ + + + + + +Release notes | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Release notes

0.5.0

New Features

  • Significant speedup of Kafka producer (#236), thanks to @Sternakt

Bugs Squashed

0.4.0

New Features

0.3.1

  • README.md file updated

0.3.0

New Features

  • Guide for fastkafka produces using partition key (#172), thanks to @Sternakt

    • Closes #161
  • Add support for Redpanda for testing and deployment (#181), thanks to @kumaranvpl

  • Remove bootstrap_servers from init and use the name of broker as an option when running/testing (#134)

  • Add a GH action file to check for broken links in the docs (#163)

  • Optimize requirements for testing and docs (#151)

  • Break requirements into base and optional for testing and dev (#124)

    • Minimize base requirements needed just for running the service.
  • Add link to example git repo into guide for building docs using actions (#81)

  • Add logging for run_in_background (#46)

  • Implement partition Key mechanism for producers (#16)

Bugs Squashed

  • Implement checks for npm installation and version (#176), thanks to @Sternakt

    • Closes #158 by checking if the npx is installed and more verbose error handling
  • Fix the helper.py link in CHANGELOG.md (#165)

  • fastkafka docs install_deps fails (#157)

    • Unexpected internal error: [Errno 2] No such file or directory: 'npx'
  • Broken links in docs (#141)

  • fastkafka run is not showing up in CLI docs (#132)

0.2.3

  • Fixed broken links on PyPi index page

0.2.2

New Features

  • Extract JDK and Kafka installation out of LocalKafkaBroker (#131)

  • PyYAML version relaxed (#119), thanks to @davorrunje

  • Replace docker based kafka with local (#68)

    • replace docker compose with a simple docker run (standard run_jupyter.sh should do)
    • replace all tests to use LocalKafkaBroker
    • update documentation

Bugs Squashed

  • Fix broken link for FastKafka docs in index notebook (#145)

  • Fix encoding issues when loading setup.py on windows OS (#135)

0.2.0

New Features

  • Replace kafka container with LocalKafkaBroker (#112)
      • Replace kafka container with LocalKafkaBroker in tests
  • Remove kafka container from tests environment
  • Fix failing tests

Bugs Squashed

  • Fix random failing in CI (#109)

0.1.3

  • version update in init.py

0.1.2

New Features

  • Git workflow action for publishing Kafka docs (#78)

Bugs Squashed

  • Include missing requirement (#110)
    • Typer is imported in this file but it is not included in settings.ini
    • Add aiohttp which is imported in this file
    • Add nbformat which is imported in _components/helpers.py
    • Add nbconvert which is imported in _components/helpers.py

0.1.1

Bugs Squashed

  • JDK install fails on Python 3.8 (#106)

0.1.0

Initial release

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/api/fastkafka/KafkaEvent/index.html b/docs/0.5.0/api/fastkafka/KafkaEvent/index.html new file mode 100644 index 0000000..88b9597 --- /dev/null +++ b/docs/0.5.0/api/fastkafka/KafkaEvent/index.html @@ -0,0 +1,32 @@ + + + + + +KafkaEvent | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

KafkaEvent

fastkafka.KafkaEvent

A generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel

Parameters:

  • message: The message contained in the Kafka event, can be of type pydantic.BaseModel.
  • key: The optional key used to identify the Kafka event.
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/index.html b/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/index.html new file mode 100644 index 0000000..cd35211 --- /dev/null +++ b/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/index.html @@ -0,0 +1,32 @@ + + + + + +avsc_to_pydantic | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

avsc_to_pydantic

fastkafka.encoder.avsc_to_pydantic

avsc_to_pydantic

def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass

Generate pydantic model from given Avro Schema

Parameters:

  • schema: Avro schema in dictionary format

Returns:

  • Pydantic model class built from given avro schema
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/api/fastkafka/index.html b/docs/0.5.0/api/fastkafka/index.html new file mode 100644 index 0000000..355610f --- /dev/null +++ b/docs/0.5.0/api/fastkafka/index.html @@ -0,0 +1,468 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

FastKafka

fastkafka.FastKafka

__init__

def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Dict[str, Any], root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AbstractAsyncContextManager[NoneType]]] = None, loop=None, client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x101ca6040>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x101c80310>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None

Creates FastKafka application

Parameters:

  • title: optional title for the documentation. If None, +the title will be set to empty string
  • description: optional description for the documentation. If +None, the description will be set to empty string
  • version: optional version for the documentation. If None, +the version will be set to empty string
  • contact: optional contact for the documentation. If None, the +contact will be set to placeholder values: +name='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'
  • kafka_brokers: dictionary describing kafka brokers used for +generating documentation
  • root_path: path to where documentation will be created
  • lifespan: asynccontextmanager that is used for setting lifespan hooks. aenter is called before app start and aexit after app stop. The lifespan is called when the application is started as an async context manager, e.g.: async with kafka_app...
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT. +Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will ensure that exactly one copy of each message is written in the stream. If :data:False, producer retries due to broker failures, etc., may write duplicates of the retried message in the stream. Note that enabling idempotence requires acks to be set to all. If it is not explicitly set by the user it will be chosen. If incompatible values are set, a :exc:ValueError will be thrown. New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time the server will wait for this consumer to rejoin the group in a case of rebalance. In the Java client this behaviour is bound to the max.poll.interval.ms configuration, but as aiokafka will rejoin the group in the background, we decouple this setting to allow finer tuning by users that use :class:.ConsumerRebalanceListener to delay rebalancing. Defaults to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection timeout. The consumer sends periodic heartbeats (heartbeat.interval.ms) to indicate its liveness to the broker. If no heartbeats are received by the broker for a group member within the session timeout, the broker will remove the consumer from the group and trigger a rebalance. The allowed range is configured with the broker configuration properties group.min.session.timeout.ms and group.max.session.timeout.ms. Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

benchmark

def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Union[~O, NoneType]]], typing.Callable[[~I], typing.Union[~O, NoneType]]]

Decorator to benchmark produces/consumes functions

Parameters:

  • interval: Period to use to calculate throughput. If value is of type int, +then it will be used as seconds. If value is of type timedelta, +then it will be used as it is. default: 1 - one second
  • sliding_window_size: The size of the sliding window to use to calculate +average throughput. default: None - By default average throughput is +not calculated

consumes

def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]], typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]]

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the consumer will subscribe to and execute the +decorated function when it receives a message from the topic, +default: None. If the topic is not specified, topic name will be +inferred from the decorated function name by stripping the defined prefix
  • decoder: Decoder to use to decode messages consumed from the topic, +default: json - By default, it uses json decoder to decode +bytes to json string and then it creates instance of pydantic +BaseModel. It also accepts custom decoder function.
  • prefix: Prefix stripped from the decorated function to define a topic name +if the topic argument is not passed, default: "on_". If the decorated +function name is not prefixed with the defined prefix and topic argument +is not passed, then this method will throw ValueError
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is the same as calling :meth:.subscribe API.
  • bootstrap_servers: a host[:port] string (or list of +host[:port] strings) that the consumer should contact to bootstrap +initial cluster metadata.

This does not have to be the full node list. +It just needs to have at least one broker that will respond to a +Metadata API Request. Default port is 9092. If no servers are +specified, will default to localhost:9092.

  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. Also +submitted to :class:~.consumer.group_coordinator.GroupCoordinator +for logging with respect to consumer group administration. Default: +aiokafka-{version}
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • request_timeout_ms: Client request timeout in milliseconds. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalancing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • api_version: specify which kafka API version to use. +:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more information see +:ref:ssl_auth. Default: None.
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying None will +disable idle checks. Default: 540000 (9 minutes).
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: +PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: None
  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

Returns:

  • : A function returning the same function

create_mocks

def create_mocks(self: fastkafka.FastKafka) -> None

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

produces

def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x101ca6040>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x101c80310>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]]

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the producer will send returned values from +the decorated function to, default: None. If the topic is not +specified, topic name will be inferred from the decorated function +name by stripping the defined prefix.
  • encoder: Encoder to use to encode messages before sending it to topic, +default: json - By default, it uses json encoder to convert +pydantic basemodel to json string and then encodes the string to bytes +using 'utf-8' encoding. It also accepts custom encoder function.
  • prefix: Prefix stripped from the decorated function to define a topic +name if the topic argument is not passed, default: "to_". If the +decorated function name is not prefixed with the defined prefix +and topic argument is not passed, then this method will throw ValueError
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. +Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence requires acks to be set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None

Returns:

  • : A function returning the same function

Exceptions:

  • ValueError: when needed

run_in_background

def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

  • A decorator function that takes a background task as an input and stores it to be run in the background.
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/index.html b/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/index.html new file mode 100644 index 0000000..5d27593 --- /dev/null +++ b/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/index.html @@ -0,0 +1,32 @@ + + + + + +ApacheKafkaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

ApacheKafkaBroker

fastkafka.testing.ApacheKafkaBroker

ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.

__init__

def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None

Initialises the ApacheKafkaBroker object

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

start

def start(self: fastkafka.testing.ApacheKafkaBroker) -> str

Starts a local kafka broker and zookeeper instance synchronously

Returns:

  • Kafka broker bootstrap server address in string format: addr:port

stop

def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None

Stops a local kafka broker and zookeeper instance synchronously

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker/index.html b/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker/index.html new file mode 100644 index 0000000..9cbc4ae --- /dev/null +++ b/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker/index.html @@ -0,0 +1,32 @@ + + + + + +LocalRedpandaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

LocalRedpandaBroker

fastkafka.testing.LocalRedpandaBroker

LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.

__init__

def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None

Initialises the LocalRedpandaBroker object

Parameters:

  • listener_port: Port on which the clients (producers and consumers) can connect
  • tag: Tag of Redpanda image to use to start container
  • seastar_core: Core(s) to use by Seastar (the framework Redpanda uses under the hood)
  • memory: The amount of memory to make available to Redpanda
  • mode: Mode to use to load configuration properties in container
  • default_log_level: Log levels to use for Redpanda

get_service_config_string

def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str

Generates a configuration for a service

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • service: "redpanda", defines which service to get config string for

start

def start(self: fastkafka.testing.LocalRedpandaBroker) -> str

Starts a local redpanda broker instance synchronously

Returns:

  • Redpanda broker bootstrap server address in string format: addr:port

stop

def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None

Stops a local redpanda broker instance synchronously

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/api/fastkafka/testing/Tester/index.html b/docs/0.5.0/api/fastkafka/testing/Tester/index.html new file mode 100644 index 0000000..ad260ad --- /dev/null +++ b/docs/0.5.0/api/fastkafka/testing/Tester/index.html @@ -0,0 +1,274 @@ + + + + + +Tester | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Tester

fastkafka.testing.Tester

__init__

def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None

Mirror-like object for testing a FastKafka application

Can be used as context manager

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

benchmark

def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Union[~O, NoneType]]], typing.Callable[[~I], typing.Union[~O, NoneType]]]

Decorator to benchmark produces/consumes functions

Parameters:

  • interval: Period to use to calculate throughput. If value is of type int, +then it will be used as seconds. If value is of type timedelta, +then it will be used as it is. default: 1 - one second
  • sliding_window_size: The size of the sliding window to use to calculate +average throughput. default: None - By default average throughput is +not calculated

consumes

def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]], typing.Callable[[pydantic.main.BaseModel], typing.Union[NoneType, typing.Awaitable[NoneType]]]]

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the consumer will subscribe to and execute the +decorated function when it receives a message from the topic, +default: None. If the topic is not specified, topic name will be +inferred from the decorated function name by stripping the defined prefix
  • decoder: Decoder to use to decode messages consumed from the topic, +default: json - By default, it uses json decoder to decode +bytes to json string and then it creates instance of pydantic +BaseModel. It also accepts custom decoder function.
  • prefix: Prefix stripped from the decorated function to define a topic name +if the topic argument is not passed, default: "on_". If the decorated +function name is not prefixed with the defined prefix and topic argument +is not passed, then this method will throw ValueError
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • bootstrap_servers: a host[:port] string (or list of +host[:port] strings) that the consumer should contact to bootstrap +initial cluster metadata.

This does not have to be the full node list. +It just needs to have at least one broker that will respond to a +Metadata API Request. Default port is 9092. If no servers are +specified, will default to localhost:9092.

  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. Also +submitted to :class:~.consumer.group_coordinator.GroupCoordinator +for logging with respect to consumer group administration. Default: +aiokafka-{version}
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • request_timeout_ms: Client request timeout in milliseconds. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalancing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • api_version: specify which kafka API version to use. +:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more information see +:ref:ssl_auth. Default: None.
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying None will +disable idle checks. Default: 540000 (9 minutes).
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: +PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: None
  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

Returns:

  • : A function returning the same function

create_mocks

def create_mocks(self: fastkafka.FastKafka) -> None

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

produces

def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x101ca6040>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x101c80310>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel]]]]]]

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the producer will send returned values from +the decorated function to, default: None - If the topic is not +specified, topic name will be inferred from the decorated function +name by stripping the defined prefix.
  • encoder: Encoder to use to encode messages before sending it to topic, +default: json - By default, it uses json encoder to convert +pydantic basemodel to json string and then encodes the string to bytes +using 'utf-8' encoding. It also accepts custom encoder function.
  • prefix: Prefix stripped from the decorated function to define a topic +name if the topic argument is not passed, default: "to_". If the +decorated function name is not prefixed with the defined prefix +and topic argument is not passed, then this method will throw ValueError
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT. +Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence requires acks to be set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None

Returns:

  • : A function returning the same function

Exceptions:

  • ValueError: when needed

run_in_background

def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

  • A decorator function that takes a background task as an input and stores it to be run in the background.

using_local_kafka

def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester

Starts local Kafka broker used by the Tester instance

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

Returns:

  • An instance of tester with Kafka as broker

using_local_redpanda

def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester

Starts local Redpanda broker used by the Tester instance

Parameters:

  • listener_port: Port on which the clients (producers and consumers) can connect
  • tag: Tag of Redpanda image to use to start container
  • seastar_core: Core(s) to use by Seastar (the framework Redpanda uses under the hood)
  • memory: The amount of memory to make available to Redpanda
  • mode: Mode to use to load configuration properties in container
  • default_log_level: Log levels to use for Redpanda

Returns:

  • An instance of tester with Redpanda as broker
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/cli/fastkafka/index.html b/docs/0.5.0/cli/fastkafka/index.html new file mode 100644 index 0000000..19cc0d4 --- /dev/null +++ b/docs/0.5.0/cli/fastkafka/index.html @@ -0,0 +1,32 @@ + + + + + +fastkafka | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

fastkafka

Usage:

$ fastkafka [OPTIONS] COMMAND [ARGS]...

Options:

  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.

Commands:

  • docs: Commands for managing fastkafka app...
  • run: Runs Fast Kafka API application
  • testing: Commands for managing fastkafka testing

fastkafka docs

Commands for managing fastkafka app documentation

Usage:

$ fastkafka docs [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • generate: Generates documentation for a FastKafka...
  • install_deps: Installs dependencies for FastKafka...
  • serve: Generates and serves documentation for a...

fastkafka docs generate

Generates documentation for a FastKafka application

Usage:

$ fastkafka docs generate [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created [default: .]
  • --help: Show this message and exit.

fastkafka docs install_deps

Installs dependencies for FastKafka documentation generation

Usage:

$ fastkafka docs install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.

fastkafka docs serve

Generates and serves documentation for a FastKafka application

Usage:

$ fastkafka docs serve [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created [default: .]
  • --bind TEXT: Some info [default: 127.0.0.1]
  • --port INTEGER: Some info [default: 8000]
  • --help: Show this message and exit.

fastkafka run

Runs Fast Kafka API application

Usage:

$ fastkafka run [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --num-workers INTEGER: Number of FastKafka instances to run, defaults to number of CPU cores. [default: 8]
  • --kafka-broker TEXT: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [required]
  • --help: Show this message and exit.

fastkafka testing

Commands for managing fastkafka testing

Usage:

$ fastkafka testing [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • install_deps: Installs dependencies for FastKafka app...

fastkafka testing install_deps

Installs dependencies for FastKafka app testing

Usage:

$ fastkafka testing install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/cli/run_fastkafka_server_process/index.html b/docs/0.5.0/cli/run_fastkafka_server_process/index.html new file mode 100644 index 0000000..5478839 --- /dev/null +++ b/docs/0.5.0/cli/run_fastkafka_server_process/index.html @@ -0,0 +1,32 @@ + + + + + +run_fastkafka_server_process | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

run_fastkafka_server_process

Usage:

$ run_fastkafka_server_process [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --kafka-broker TEXT: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [required]
  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_00_FastKafka_Demo/index.html b/docs/0.5.0/guides/Guide_00_FastKafka_Demo/index.html new file mode 100644 index 0000000..04a9788 --- /dev/null +++ b/docs/0.5.0/guides/Guide_00_FastKafka_Demo/index.html @@ -0,0 +1,122 @@ + + + + + +FastKafka tutorial | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

FastKafka tutorial

FastKafka is a powerful and easy-to-use Python +library for building asynchronous services that interact with Kafka +topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install it with pip as usual:

pip install fastkafka
try:
import fastkafka
except:
! pip install fastkafka

Running in Colab

You can start this interactive tutorial in Google Colab by clicking the +button below:

Open In Colab

Writing server code

Here is an example python script using FastKafka that takes data from a +Kafka topic, makes a prediction using a predictive model, and outputs +the prediction to another Kafka topic.

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call downloads +the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +generating the documentation only and it is not being checked by the +actual server.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument instructs Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
"""Consume one IrisInputData message from the "input_data" topic and predict its species."""
# predict() returns an array of class indices; take the first (only) result.
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

# Delegate publishing of the prediction to the @produces-decorated to_predictions.
to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
    """Build an IrisPrediction from a class index and produce it to "predictions".

    The framework serializes the returned model and publishes it to the topic.
    """
    # Index-to-name mapping for the three Iris species the model can predict.
    species_names = ["setosa", "versicolor", "virginica"]
    return IrisPrediction(species=species_names[species_class])

Testing the service

The service can be tested using the +Tester +instances which internally starts Kafka broker and zookeeper.

Before running tests, we have to install Java runtime and Apache Kafka +locally. To simplify the process, we provide the following convenience +command:

fastkafka testing install_deps
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)

# Start Tester app and create local Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created an Iris classification model and encapsulated it into our +fastkafka application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

Running the service

The service can be started using the built-in fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

# Global registry holding the trained model(s); populated by lifespan() below
# and read by the consumer.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
"""Train the Iris classifier on startup and release it on shutdown."""
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
# The application runs while suspended at this yield.
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
"""Iris flower measurements consumed from the "input_data" topic; all fields required and non-negative."""
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
"""Predicted Iris species produced to the "predictions" topic."""
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

# Broker connection details for local development and production; used for
# AsyncAPI documentation generation (see the guide text above).
kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

# Application object wired to the lifespan context manager defined above.
kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
"""Consume one IrisInputData message from the "input_data" topic and predict its species."""
# predict() returns an array of class indices; take the first (only) result.
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

# Delegate publishing of the prediction to the @produces-decorated to_predictions.
to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
"""Build an IrisPrediction from a class index and produce it to the "predictions" topic."""
# Map the numeric class index predicted by the model to a species name.
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

To run the service, you will need a running Kafka broker on localhost as +specified in the kafka_brokers parameter above. We can start the Kafka +broker locally using the +ApacheKafkaBroker. +Notice that the same happens automatically in the +Tester +as shown above.

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.

'127.0.0.1:9092'

Then, we start the FastKafka service by running the following command in +the folder where the application.py file is located:

fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app

In the above command, we use --num-workers option to specify how many +workers to launch and we use --kafka-broker option to specify which +kafka broker configuration to use from earlier specified kafka_brokers

[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]
[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]
^C
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...

You need to interrupt running of the cell above by selecting +Runtime->Interrupt execution on the toolbar above.

Finally, we can stop the local Kafka Broker:

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

When running in Colab, we need to update Node.js first:

We need to install all dependencies for the generator using the +following command line:

fastkafka docs install_deps
[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.

. This will generate the asyncapi folder in relative path where all +your documentation will be saved. You can check out the content of it +with:

ls -l asyncapi
total 8
drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs
drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find a asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
^C
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
bootstrap_servers="localhost:9092",
)

Kafka_servers

The following documentation snippet are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippet are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages as defined as subclasses of BaseModel are +documented as well:

![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_01_Intro/index.html b/docs/0.5.0/guides/Guide_01_Intro/index.html new file mode 100644 index 0000000..e0c9c79 --- /dev/null +++ b/docs/0.5.0/guides/Guide_01_Intro/index.html @@ -0,0 +1,51 @@ + + + + + +Intro | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Intro

This tutorial will show you how to use FastKafkaAPI, step by +step.

The goal of FastKafkaAPI is to simplify the use of Apache Kafka in +Python inspired by FastAPI look and feel.

In this Intro tutorial we’ll go through the basic requirements to run the +demos presented in future steps.

Installing FastKafkaAPI

First step is to install FastKafkaAPI

$ pip install fastkafka

Preparing a Kafka broker

Next step is to prepare the Kafka environment, our consumers and +producers will need some channel of communication.

!!! info "Hey, your first info!"

If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. 

To go through the tutorial, we recommend that you use dockerized Kafka +brokers, if you have Docker and docker-compose installed the setup +should take you no time (if we exclude the container download times).

!!! warning "Listen! This is important."

To be able to setup this configuration you need to have Docker and docker-compose installed

See here for more info on <a href = \"https://docs.docker.com/\" target=\"_blank\">Docker</a> and <a href = \"https://docs.docker.com/compose/install/\" target=\"_blank\">docker compose</a>

To set up the recommended environment, first, create a new folder where +you want to save your demo files (e.g. fastkafka_demo). Inside the new +folder create a new YAML file named kafka_demo.yml and copy the +following configuration into it:

version: "3"
services:
zookeeper:
image: wurstmeister/zookeeper
hostname: zookeeper
container_name: zookeeper
networks:
- fastkafka-network
ports:
- "2181:2181"
- "22:22"
- "2888:2888"
- "3888:3888"
kafka:
image: wurstmeister/kafka
container_name: kafka
ports:
- "9093:9093"
environment:
HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d' ' -f 2"
KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093
KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093
KAFKA_INTER_BROKER_LISTENER_NAME: INTER
KAFKA_CREATE_TOPICS: "hello:1:1"
volumes:
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
- zookeeper
healthcheck:
test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]
interval: 5s
timeout: 10s
retries: 5
networks:
- fastkafka-network
networks:
fastkafka-network:
name: "fastkafka-network"

This configuration will start a single instance of Zookeeper, single +instance of Kafka broker and create a ‘hello’ topic (quite enough for a +start). To start the configuration, run:

$ docker-compose -f kafka_demo.yml up -d --wait

This will start the necessary containers and wait till they report that +they are Healthy. After the command finishes, you are good to go to try +out the FastKafkaAPI capabilities! 🎊

Running the code

After installing FastKafkaAPI and initialising the Kafka broker you can +proceed to the ‘First Steps’ part of the tutorial. There, you will write +your first Kafka client and producer apps, run them, and interact with +them.

You are highly encouraged to follow along the tutorials not just by +reading through them but by implementing the code examples in your own +environment. This will not only help you remember the use cases better +but also, hopefully, demonstrate to you the ease of use of this library.

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_02_First_Steps/index.html b/docs/0.5.0/guides/Guide_02_First_Steps/index.html new file mode 100644 index 0000000..7d99e5c --- /dev/null +++ b/docs/0.5.0/guides/Guide_02_First_Steps/index.html @@ -0,0 +1,49 @@ + + + + + +First Steps | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

First Steps

Creating a simple Kafka consumer app

For our first demo we will create the simplest possible Kafka consumer +and run it using ‘fastkafka run’ command.

The consumer will:

  1. Connect to the Kafka Broker we setup in the Intro guide

  2. Listen to the hello topic

  3. Write any message received from the hello topic to stdout

To create the consumer, first, create a file named

hello_kafka_consumer.py and copy the following code to it:

from os import environ

from fastkafka import FastKafka
from pydantic import BaseModel, Field

# Broker location comes from the environment; environ[...] raises KeyError
# if KAFKA_HOSTNAME/KAFKA_PORT are not exported (see the note below).
kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

# Single-entry broker configuration passed to FastKafka below.
kafka_brokers = {
"localhost": {
"description": "local development kafka",
"url": kafka_server_url,
"port": kafka_server_port
}
}

class HelloKafkaMsg(BaseModel):
"""Demo message schema for the "hello" topic: a single required string field."""
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_app = FastKafka(
kafka_brokers=kafka_brokers
)

# No topic argument given: the topic is presumably inferred from the
# on_<topic> function name — the guide states this consumer listens to "hello".
@kafka_app.consumes()
async def on_hello(msg: HelloKafkaMsg):
"""Print every message consumed from the "hello" topic."""
# flush=True so worker output appears in real time (see the warning note below).
print(f"Got data, msg={msg.msg}", flush=True)

!!! info "Kafka configuration"

    This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

!!! warning "Remember to flush"

Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.

To run this consumer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app

After running the command, you should see something similar to the output +below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})
[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.

Now you can interact with your consumer, by sending the messages to the +subscribed ‘hello’ topic, don’t worry, we will cover this in the next +step of this guide.

Sending first message to your consumer

After we have created and run our first consumer, we should send a +message to it, to make sure it is working properly.

If you are using the Kafka setup as described in the Intro guide, you +can follow the steps listed here to send a message to the hello topic.

First, connect to your running kafka broker by running:

docker run -it kafka /bin/bash

Then, when connected to the container, run:

kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello

This will open an interactive connection to the hello topic, now you can +write your messages to the topic and they will be consumed by our +consumer.

In the shell, type:

{"msg":"hello"}

and press enter. This will send a hello message to the topic which will +be read by our running consumer and output to stdout.

Check the output of your consumer (terminal where you ran the ‘fastkafka +run’ command) and confirm that your consumer has read the Kafka message. +You should see something like this:

Got data, msg=hello

Creating a hello Kafka producer

Consuming messages is only a part of this Library functionality, the +other big part is producing the messages. So, let’s create our first +kafka producer which will send its greetings to our consumer +periodically.

The producer will:

  1. Connect to the Kafka Broker we setup in the Intro guide
  2. Connect to the hello topic
  3. Periodically send a message to the hello world topic

To create the producer, first, create a file named

hello_kafka_producer.py and copy the following code to it:

from os import environ

import asyncio
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

# Broker location comes from the environment; environ[...] raises KeyError
# if KAFKA_HOSTNAME/KAFKA_PORT are not exported (see the note below).
kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

# Single-entry broker configuration passed to FastKafka below.
kafka_brokers = {
"localhost": {
"description": "local development kafka",
"url": kafka_server_url,
"port": kafka_server_port
}
}

class HelloKafkaMsg(BaseModel):
"""Demo message schema for the "hello" topic: a single required string field."""
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_app = FastKafka(
kafka_brokers=kafka_brokers
)

# Module-level logger used by the producer function below.
logger = get_logger(__name__)

@kafka_app.produces()
async def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:
"""Log and return the message; the framework publishes the returned value (topic presumably inferred from the to_<topic> name)."""
logger.info(f"Producing: {msg}")
return msg

@kafka_app.run_in_background()
async def hello_every_second():
    """Background task: publish one HelloKafkaMsg to the "hello" topic every second."""
    # `while True:` is the idiomatic Python spelling (no parentheses around the condition).
    while True:
        await to_hello(HelloKafkaMsg(msg="hello"))
        await asyncio.sleep(1)

!!! info "Kafka configuration"

This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

To run this producer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app

After running the command, you should see something similar to the output below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.

Now, while the producer is running, it will send a HelloKafkaMsg every +second to the hello kafka topic. If your consumer is still running, you +should see the messages appear in its log.

Recap

In this guide we have:

  1. Created a simple Kafka consumer using FastKafka
  2. Sent a message to our consumer through Kafka
  3. Created a simple Kafka producer using FastKafka
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_03_Authentication/index.html b/docs/0.5.0/guides/Guide_03_Authentication/index.html new file mode 100644 index 0000000..b218f9f --- /dev/null +++ b/docs/0.5.0/guides/Guide_03_Authentication/index.html @@ -0,0 +1,37 @@ + + + + + +Authentication | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Authentication

TLS Authentication

sasl_mechanism (str) – Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN, +GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN

sasl_plain_username (str) – username for SASL PLAIN authentication. +Default: None

sasl_plain_password (str) – password for SASL PLAIN authentication. +Default: None

sasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token +provider instance. (See kafka.oauth.abstract). Default: None

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow/index.html b/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow/index.html new file mode 100644 index 0000000..3ee7fab --- /dev/null +++ b/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow/index.html @@ -0,0 +1,42 @@ + + + + + +Deploy FastKafka docs to GitHub Pages | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Deploy FastKafka docs to GitHub Pages

Getting started

Add your workflow file .github/workflows/fastkafka_docs_deploy.yml and +push it to your remote default branch.

Here is an example workflow:

name: Deploy FastKafka Generated Documentation to GitHub Pages

on:
push:
branches: [ "main", "master" ]
workflow_dispatch:

jobs:
deploy:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

Options

Set app location

Input in the form of path:app, where path is the path to a Python +file and app is an object of type +FastKafka:

- name: Deploy
uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

In the above example, +FastKafka +app is named as kafka_app and it is available in the application +submodule of the test_fastkafka module.

Example Repository

A FastKafka-based library that uses the above-mentioned workflow actions to publish FastKafka docs to GitHub Pages can be found here.

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_05_Lifespan_Handler/index.html b/docs/0.5.0/guides/Guide_05_Lifespan_Handler/index.html new file mode 100644 index 0000000..500f321 --- /dev/null +++ b/docs/0.5.0/guides/Guide_05_Lifespan_Handler/index.html @@ -0,0 +1,75 @@ + + + + + +Lifespan Events | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Lifespan Events

Did you know that you can define some special code that runs before and +after your Kafka application? This code will be executed just once, but +it covers the whole lifespan of your app! 🚀

Let's break it down:

You can define logic (code) that should be executed before the +application starts up. This is like a warm-up for your app, getting it +ready to consume and produce messages.

Similarly, you can define logic (code) that should be executed when the +application is shutting down. This is like a cool-down for your app, +making sure everything is properly closed and cleaned up.

By executing code before consuming and after producing, you cover the +entire lifecycle of your application 🎉

This is super handy for setting up shared resources that are needed +across consumers and producers, like a database connection pool or a +machine learning model. And the best part? You can clean up these +resources when the app is shutting down!

So let's give it a try and see how it can make your Kafka app even more awesome! 💪

Lifespan example - Iris prediction model

Let’s dive into an example to see how you can leverage the lifecycle +handler to solve a common use case. Imagine that you have some machine +learning models that need to consume incoming messages and produce +response/prediction messages. These models are shared among consumers +and producers, which means you don’t want to load them for every +message.

Here’s where the lifecycle handler comes to the rescue! By loading the +model before the messages are consumed and produced, but only right +before the application starts receiving messages, you can ensure that +the model is ready to use without compromising the performance of your +tests. In the upcoming sections, we’ll walk you through how to +initialize an Iris species prediction model and use it in your developed +application.

Lifespan

You can define this startup and shutdown logic using the lifespan +parameter of the FastKafka app, and an async context manager.

Let’s start with an example and then see it in detail.

We create an async function lifespan() with yield like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from fastkafka import FastKafka

ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

The first thing to notice, is that we are defining an async function +with yield. This is very similar to Dependencies with yield.

The first part of the function, before the yield, will be executed +before the application starts. And the part after the yield will +be executed after the application has finished.

This lifespan will create an iris_prediction model on application +startup and cleanup the references after the app is shutdown.

The lifespan will be passed an KafkaApp reference on startup of your +application, which you can use to reference your application on startup.

For demonstration sake, we also added prints so that when running the +app we can see that our lifespan was called.

Async context manager

Context managers can be used in with blocks, our lifespan, for example +could be used like this:

ml_models = {}
async with lifespan(None):
print(ml_models)

When you create a context manager or an async context manager, what it +does is that, before entering the with block, it will execute the code +before the yield, and after exiting the with block, it will execute +the code after the yield.

If you want to learn more about context managers and contextlib +decorators, please visit Python official +docs

App demo

FastKafka app

Lets now create our application using the created lifespan handler.

Notice how we passed our lifespan handler to the app when constructing it through the lifespan argument.

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Data modeling

Lets model the Iris data for our app:

from pydantic import BaseModel, Field, NonNegativeFloat

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Consumers and producers

Lets create a consumer and producer for our app that will generate +predictions from input iris data.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Final app

The final app looks like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")
ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Running the app

Now we can run the app with your custom lifespan handler. Copy the code +above in lifespan_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app

When you run the app, you should see a similar output to the one below:

[262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[262292]: Loading the model!
[262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished
[262292]: Exiting, clearing model dict!
[INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.

Recap

In this guide we have defined a lifespan handler and passed to our +FastKafka app.

Some important points are:

  1. Lifespan handler is implemented as +AsyncContextManager
  2. Code before yield in lifespan will be executed before +application startup
  3. Code after yield in lifespan will be executed after +application shutdown
  4. You can pass your lifespan handler to FastKafka app on +initialisation by passing a lifespan argument
+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka/index.html b/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka/index.html new file mode 100644 index 0000000..da171c3 --- /dev/null +++ b/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka/index.html @@ -0,0 +1,80 @@ + + + + + +Benchmarking FastKafka app | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Benchmarking FastKafka app

Prerequisites

To benchmark a +FastKafka +project, you will need the following:

  1. A library built with +FastKafka.
  2. A running Kafka instance to benchmark the FastKafka application +against.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

FastKafka +has a decorator for benchmarking which is appropriately called as +benchmark. Let’s edit our application.py file and add the +benchmark decorator to the consumes method.

# content of the "application.py" file with benchmark

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
@kafka_app.benchmark(interval=1, sliding_window_size=5)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Here we are conducting a benchmark of a function that consumes data from +the input_data topic with an interval of 1 second and a sliding window +size of 5.

This benchmark method uses the interval parameter to calculate the +results over a specific time period, and the sliding_window_size +parameter to determine the maximum number of results to use in +calculating the average throughput and standard deviation.

This benchmark is important to ensure that the function is performing +optimally and to identify any areas for improvement.

Starting Kafka

If you already have a Kafka running somewhere, then you can skip this +step.

Please keep in mind that your benchmarking results may be affected by +bottlenecks such as network, CPU cores in the Kafka machine, or even the +Kafka configuration itself.

Installing Java and Kafka

We need a working Kafka instance to benchmark our FastKafka app, and to run Kafka we need Java. Thankfully, FastKafka comes with a CLI to install both Java and Kafka on our machine.

So, let’s install Java and Kafka by executing the following command.

fastkafka testing install_deps

The above command will extract Kafka scripts at the location "$HOME/.local/kafka_2.13-3.3.2" on your machine.

Creating configuration for Zookeeper and Kafka

Now we need to start Zookeeper and Kafka separately, and to start +them we need zookeeper.properties and kafka.properties files.

Let’s create a folder inside the folder where Kafka scripts were +extracted and change directory into it.

mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir

Let’s create a file called zookeeper.properties and write the +following content to the file:

dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper
clientPort=2181
maxClientCnxns=0

Similarly, let’s create a file called kafka.properties and write the +following content to the file:

broker.id=0
listeners=PLAINTEXT://:9092

num.network.threads=3
num.io.threads=8
socket.send.buffer.bytes=102400
socket.receive.buffer.bytes=102400
socket.request.max.bytes=104857600

num.partitions=1
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1

log.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs
log.flush.interval.messages=10000
log.flush.interval.ms=1000
log.retention.hours=168
log.retention.bytes=1073741824
log.segment.bytes=1073741824
log.retention.check.interval.ms=300000

zookeeper.connect=localhost:2181
zookeeper.connection.timeout.ms=18000

Starting Zookeeper and Kafka

We need two different terminals to run Zookeeper in one and Kafka in +another. Let’s open a new terminal and run the following commands to +start Zookeeper:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./zookeeper-server-start.sh ../data_dir/zookeeper.properties

Once Zookeeper is up and running, open a new terminal and execute the +follwing commands to start Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-server-start.sh ../data_dir/kafka.properties

Now we have both Zookeeper and Kafka up and running.

Creating topics in Kafka

In a new terminal, please execute the following command to create +necessary topics in Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092
./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092

Populating topics with dummy data

To benchmark our +FastKafka +app, we need some data in Kafka topics.

In the same terminal, let’s create some dummy data:

yes '{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}' | head -n 1000000 > /tmp/test_data

This command will create a file called test_data in the tmp folder +with one million rows of text. This will act as dummy data to populate +the input_data topic.

Let’s populate the created topic input_data with the dummy data which +we created above:

./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data

Now our topic input_data has one million records/messages in it. If +you want more messages in topic, you can simply execute the above +command again and again.

Benchmarking FastKafka

Once Zookeeper and Kafka are ready, benchmarking +FastKafka +app is as simple as running the fastkafka run command:

fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app

This command will start the +FastKafka +app and begin consuming messages from Kafka, which we spun up earlier. +Additionally, the same command will output all of the benchmark +throughputs based on the interval and sliding_window_size values.

The output for the fastkafka run command is:

[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh
ost:9092', 'max_poll_records': 100}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition
=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)

Based on the output, when using 1 worker, our +FastKafka +app achieved a throughput of 93k messages per second and an +average throughput of 93k messages per second.

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html b/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html new file mode 100644 index 0000000..7237c6e --- /dev/null +++ b/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html @@ -0,0 +1,150 @@ + + + + + +Encoding and Decoding Kafka Messages with FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Encoding and Decoding Kafka Messages with FastKafka

Prerequisites

  1. A basic knowledge of +FastKafka +is needed to proceed with this guide. If you are not familiar with +FastKafka, +please go through the tutorial first.
  2. FastKafka +with its dependencies installed is needed. Please install +FastKafka +using the command - pip install fastkafka

Ways to Encode and Decode Messages with FastKafka

In Python, by default, we send Kafka messages as bytes. Even if our message is a string, we convert it to bytes and then send it to a Kafka topic. Similarly, while consuming messages, we consume them as bytes and then convert them to strings.

In FastKafka, we specify message schema using Pydantic models as +mentioned in tutorial:

# Define Pydantic models for Kafka messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Then, we send and receive messages as instances of Pydantic models which +we defined. So, FastKafka needs a way to encode/decode to these Pydantic +model messages to bytes in order to send/receive messages to/from Kafka +topics.

The @consumes and @produces methods of FastKafka accept a parameter +called decoder/encoder to decode/encode Kafka messages. FastKafka +provides three ways to encode and decode messages:

  1. json - This is the default encoder/decoder option in FastKafka. +While producing, this option converts our instance of Pydantic model +messages to a JSON string and then converts it to bytes before +sending it to the topic. While consuming, it converts bytes to a +JSON string and then constructs an instance of Pydantic model from +the JSON string.
  2. avro - This option uses Avro encoding/decoding to convert instances +of Pydantic model messages to bytes while producing, and while +consuming, it constructs an instance of Pydantic model from bytes.
  3. custom encoder/decoder - If you are not happy with the json or avro +encoder/decoder options, you can write your own encoder/decoder +functions and use them to encode/decode Pydantic messages.

1. Json encoder and decoder

The default option in FastKafka is json encoder/decoder. This option, +while producing, converts our instance of pydantic model messages to +json string and then converts to bytes before sending it to the topics. +While consuming it converts bytes to json string and then constructs +instance of pydantic model from json string.

We can use the application from tutorial as +is, and it will use the json encoder/decoder by default. But, for +clarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder +parameter:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder="json")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="json")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

In the above code, the @kafka_app.consumes decorator sets up a +consumer for the “input_data" topic, using the ‘json’ decoder to convert +the message payload to an instance of IrisInputData. The +@kafka_app.produces decorator sets up a producer for the “predictions" +topic, using the ‘json’ encoder to convert the instance of +IrisPrediction to message payload.

2. Avro encoder and decoder

What is Avro?

Avro is a row-oriented remote procedure call and data serialization +framework developed within Apache’s Hadoop project. It uses JSON for +defining data types and protocols, and serializes data in a compact +binary format. To learn more about the Apache Avro, please check out the +docs.

Installing FastKafka with Avro dependencies

FastKafka +with dependencies for Apache Avro installed is needed to use avro +encoder/decoder. Please install +FastKafka +with Avro support using the command - pip install fastkafka[avro]

Defining Avro Schema Using Pydantic Models

By default, you can use Pydantic model to define your message schemas. +FastKafka internally takes care of encoding and decoding avro messages, +based on the Pydantic models.

So, similar to the tutorial, the message schema will +remain as it is.

# Define Pydantic models for Avro messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

No need to change anything to support avro. You can use existing +Pydantic models as is.

Reusing existing avro schema

If you are using some other library to send and receive avro encoded +messages, it is highly likely that you already have an Avro schema +defined.

Building pydantic models from avro schema dictionary

Let’s modify the above example and let’s assume we have schemas already +for IrisInputData and IrisPrediction which will look like below:

iris_input_data_schema = {
"type": "record",
"namespace": "IrisInputData",
"name": "IrisInputData",
"fields": [
{"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
{"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
{"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
{"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
],
}
iris_prediction_schema = {
"type": "record",
"namespace": "IrisPrediction",
"name": "IrisPrediction",
"fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}

We can easily construct pydantic models from avro schema using +avsc_to_pydantic +function which is included as part of +FastKafka +itself.

from fastkafka._components.encoder.avro import avsc_to_pydantic

IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.__fields__)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.__fields__)

The above code will convert avro schema to pydantic models and will +print pydantic models’ fields. The output of the above is:

{'sepal_length': ModelField(name='sepal_length', type=float, required=True),
'sepal_width': ModelField(name='sepal_width', type=float, required=True),
'petal_length': ModelField(name='petal_length', type=float, required=True),
'petal_width': ModelField(name='petal_width', type=float, required=True)}

{'species': ModelField(name='species', type=str, required=True)}

This is exactly same as manually defining the pydantic models ourselves. +You don’t have to worry about not making any mistakes while converting +avro schema to pydantic models manually. You can easily and +automatically accomplish it by using +avsc_to_pydantic +function as demonstrated above.

Building pydantic models from .avsc file

Not all cases will have avro schema conveniently defined as a python +dictionary. You may have it stored as the proprietary .avsc files in +filesystem. Let’s see how to convert those .avsc files to pydantic +models.

Let’s assume our avro files are stored in files called +iris_input_data_schema.avsc and iris_prediction_schema.avsc. In that +case, following code converts the schema to pydantic models:

import json
from fastkafka._components.encoder.avro import avsc_to_pydantic


with open("iris_input_data_schema.avsc", "rb") as f:
iris_input_data_schema = json.load(f)

with open("iris_prediction_schema.avsc", "rb") as f:
iris_prediction_schema = json.load(f)


IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.__fields__)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.__fields__)

Consume/Produce avro messages with FastKafka

FastKafka +provides @consumes and @produces methods to consume/produce +messages to/from a Kafka topic. This is explained in +tutorial.

The @consumes and @produces methods accepts a parameter called +decoder/encoder to decode/encode avro messages.

@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

In the above example, in @consumes and @produces methods, we +explicitly instruct FastKafka to decode and encode messages using +the avro decoder/encoder instead of the default json +decoder/encoder.

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use avro to decode and +encode messages:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


iris_input_data_schema = {
"type": "record",
"namespace": "IrisInputData",
"name": "IrisInputData",
"fields": [
{"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
{"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
{"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
{"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
],
}
iris_prediction_schema = {
"type": "record",
"namespace": "IrisPrediction",
"name": "IrisPrediction",
"fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}
# Or load schema from avsc files

from fastkafka._components.encoder.avro import avsc_to_pydantic

IrisInputData = avsc_to_pydantic(iris_input_data_schema)
IrisPrediction = avsc_to_pydantic(iris_prediction_schema)


from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

The above code is a sample implementation of using FastKafka to consume +and produce Avro-encoded messages from/to a Kafka topic. The code +defines two Avro schemas for the input data and the prediction result. +It then uses the +avsc_to_pydantic +function from the FastKafka library to convert the Avro schema into +Pydantic models, which will be used to decode and encode Avro messages.

The +FastKafka +class is then instantiated with the broker details, and two functions +decorated with @kafka_app.consumes and @kafka_app.produces are +defined to consume messages from the “input_data" topic and produce +messages to the “predictions" topic, respectively. The functions uses +the decoder=“avro" and encoder=“avro" parameters to decode and encode +the Avro messages.

In summary, the above code demonstrates a straightforward way to use +Avro-encoded messages with FastKafka to build a message processing +pipeline.

3. Custom encoder and decoder

If you are not happy with the json or avro encoder/decoder options, you +can write your own encoder/decoder functions and use them to +encode/decode Pydantic messages.

Writing a custom encoder and decoder

In this section, let’s see how to write a custom encoder and decoder +which obfuscates kafka message with simple +ROT13 cipher.

import codecs
import json


def custom_encoder(msg: BaseModel) -> bytes:
msg_str = msg.json()
obfuscated = codecs.encode(msg_str, 'rot13')
raw_bytes = obfuscated.encode("utf-8")
return raw_bytes

def custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:
obfuscated = raw_msg.decode("utf-8")
msg_str = codecs.decode(obfuscated, 'rot13')
msg_dict = json.loads(msg_str)
return cls(**msg_dict)

The above code defines two custom functions for encoding and decoding +messages in a Kafka application using the FastKafka library.

The encoding function, custom_encoder(), takes a message msg which +is an instance of a Pydantic model, converts it to a JSON string using +the json() method, obfuscates the resulting string using the ROT13 +algorithm from the codecs module, and finally encodes the obfuscated +string as raw bytes using the UTF-8 encoding.

The decoding function, custom_decoder(), takes a raw message raw_msg +in bytes format, a Pydantic class to construct instance with cls +parameter. It first decodes the raw message from UTF-8 encoding, then +uses the ROT13 algorithm to de-obfuscate the string. Finally, it loads +the resulting JSON string using the json.loads() method and returns a +new instance of the specified cls class initialized with the decoded +dictionary.

These functions can be used with FastKafka’s encoder and decoder +parameters to customize the serialization and deserialization of +messages in Kafka topics.

Let’s test the above code

i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

encoded = custom_encoder(i)
display(encoded)

decoded = custom_decoder(encoded, IrisInputData)
display(decoded)

This will result in following output

b'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}'

IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use our custom decoder and +encoder functions:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")


import codecs
import json


def custom_encoder(msg: BaseModel) -> bytes:
msg_str = msg.json()
obfuscated = codecs.encode(msg_str, 'rot13')
raw_bytes = obfuscated.encode("utf-8")
return raw_bytes

def custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:
obfuscated = raw_msg.decode("utf-8")
msg_str = codecs.decode(obfuscated, 'rot13')
msg_dict = json.loads(msg_str)
return cls(**msg_dict)


from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder=custom_decoder)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder=custom_encoder)
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

This code defines a custom encoder and decoder functions for encoding +and decoding messages sent through a Kafka messaging system.

The custom encoder function takes a message represented as a +BaseModel and encodes it as bytes by first converting it to a JSON +string and then obfuscating it using the ROT13 encoding. The obfuscated +message is then converted to bytes using UTF-8 encoding and returned.

The custom decoder function takes in the bytes representing an +obfuscated message, decodes it using UTF-8 encoding, then decodes the +ROT13 obfuscation, and finally loads it as a dictionary using the json +module. This dictionary is then converted to a BaseModel instance +using the cls parameter.

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_11_Consumes_Basics/index.html b/docs/0.5.0/guides/Guide_11_Consumes_Basics/index.html new file mode 100644 index 0000000..5e53f88 --- /dev/null +++ b/docs/0.5.0/guides/Guide_11_Consumes_Basics/index.html @@ -0,0 +1,61 @@ + + + + + +@consumes basics | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

@consumes basics

You can use @consumes decorator to consume messages from Kafka topics.

In this guide we will create a simple FastKafka app that will consume +HelloWorld messages from hello_world topic.

Import FastKafka

To use the @consumes decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

Define the structure of the messages

Next, you need to define the structure of the messages you want to +consume from the topic using pydantic. For +the guide we’ll stick to something basic, but you are free to define any +complex message structure you wish in your project, just make sure it +can be JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a consumer function and decorate it with @consumes

Let’s create a consumer function that will consume HelloWorld messages +from hello_world topic and log them.


from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

@app.consumes()
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

The function decorated with the @consumes decorator will be called +when a message is produced to Kafka.

The message will then be injected into the typed msg argument of the +function and its type will be used to parse the message.

In this example case, when the message is sent into a hello_world +topic, it will be parsed into a HelloWorld class and on_hello_world +function will be called with the parsed class as msg argument value.

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)


kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

@app.consumes()
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Run the app

Now we can run the app. Copy the code above in consumer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

After running the command, you should see this output in your terminal:

[382372]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[382372]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[382372]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[382372]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
[382372]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 382372...
[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[382372]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 382372 terminated.

Send the message to kafka topic

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo {\"msg\": \"Hello world\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the “Got msg: msg='Hello world'" being logged by your +consumer.

Choosing a topic

You probably noticed that you didn’t define which topic you are +receiving the message from, this is because the @consumes decorator +determines the topic by default from your function name. The decorator +will take your function name and strip the default “on_" prefix from it +and use the rest as the topic name. In this example case, the topic is +hello_world.

You can choose your custom prefix by defining the prefix parameter in +consumes decorator, like this:


from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

@app.consumes(prefix="read_from_")
async def read_from_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Also, you can define the topic name completely by defining the topic +in parameter in consumes decorator, like this:


from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

@app.consumes(topic="my_special_topic")
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Message data

The message received from kafka is translated from binary JSON +representation into the class defined by typing of msg parameter in the +function decorated by the @consumes decorator.

In this example case, the message will be parsed into a HelloWorld +class.

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_21_Produces_Basics/index.html b/docs/0.5.0/guides/Guide_21_Produces_Basics/index.html new file mode 100644 index 0000000..c9271c1 --- /dev/null +++ b/docs/0.5.0/guides/Guide_21_Produces_Basics/index.html @@ -0,0 +1,62 @@ + + + + + +@produces basics | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

@produces basics

You can use @produces decorator to produce messages to Kafka topics.

In this guide we will create a simple FastKafka app that will produce +hello world messages to hello_world topic.

Import FastKafka

To use the @produces decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

Define the structure of the messages

Next, you need to define the structure of the messages you want to send +to the topic using pydantic. For the guide +we’ll stick to something basic, but you are free to define any complex +message structure you wish in your project, just make sure it can be +JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a producer function and decorate it with @produces

Let’s create a producer function that will produce HelloWorld messages +to hello_world topic:


@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Now you can call your defined function as any normal python function in +your code. The side effect of calling the function will be that the +value you are returning will also be sent to a kafka topic.

By default, the topic is determined from your function name, the “to_" +prefix is stripped and what is left over is used as a topic name. In this +case, that is hello_world.

Instruct the app to start sending HelloWorld messages

Let’s use @run_in_background decorator to instruct our app to send +HelloWorld messages to hello_world topic every second.


import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)


kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

script_file = "producer_example.py"
cmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"
md(
f"Now we can run the app. Copy the code above in producer_example.py and run it by running\n```shell\n{cmd}\n```"
)

Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.

Check if the message was sent to the Kafka topic

Lets check the topic and see if there is a “Hello world!" message in the +hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the {“msg": “Hello world!"} messages in your topic.

Choosing a topic

You probably noticed that you didn’t define which topic you are sending +the message to, this is because the @produces decorator determines the +topic by default from your function name. The decorator will take your +function name and strip the default “to_" prefix from it and use the +rest as the topic name. In this example case, the topic is +hello_world.

!!! warn "New topics"

Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.

You can choose your custom prefix by defining the prefix parameter in +produces decorator, like this:


@app.produces(prefix="send_to_")
async def send_to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Also, you can define the topic name completely by defining the topic parameter in the produces decorator, like this:


@app.produces(topic="my_special_topic")
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Message data

The return value from your function will be translated to a JSON string and then to bytes and sent to the defined Kafka topic. The typing of the return value is used for generating the documentation for your Kafka app.

In this example case, the return value is the HelloWorld class which will be translated into a JSON formatted string and then to bytes. The translated data will then be sent to Kafka in the form of: b'{"msg": "Hello world!"}'

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_22_Partition_Keys/index.html b/docs/0.5.0/guides/Guide_22_Partition_Keys/index.html new file mode 100644 index 0000000..d6d5241 --- /dev/null +++ b/docs/0.5.0/guides/Guide_22_Partition_Keys/index.html @@ -0,0 +1,55 @@ + + + + + +Defining a partition key | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Defining a partition key

Partition keys are used in Apache Kafka to determine which partition a +message should be written to. This ensures that related messages are +kept together in the same partition, which can be useful for ensuring +order or for grouping related messages together for efficient +processing. Additionally, partitioning data across multiple partitions +allows Kafka to distribute load across multiple brokers and scale +horizontally, while replicating data across multiple brokers provides +fault tolerance.

You can define your partition keys when using the @produces decorator, +this guide will demonstrate to you this feature.

Return a key from the producing function

To define a key for the message that you want to produce to Kafka topic, +you need to wrap the response into +KafkaEvent +class and set the key value. Check the example below:


from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

In the example, we want to return the HelloWorld message class with +the key defined as my_key. So, we wrap the message and key into a +KafkaEvent class and return it as such.

While generating the documentation, the +KafkaEvent +class will be unwrapped and the HelloWorld class will be documented in +the definition of message type, same way if you didn’t use the key.

!!! info "Which key to choose?"

Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.

App example

We will modify the app example from @producer basics guide to return +the HelloWorld with our key. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

Now we can run the app. Copy the code above in producer_with_key_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app

After running the command, you should see this output in your terminal:

[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.

Check if the message was sent to the Kafka topic with the desired key

Let's check the topic and see if there is a "Hello world!" message in the hello_world topic with the defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the my_key {"msg": "Hello world!"} messages in your topic appearing, the my_key part of the message is the key that we defined in our producing function.

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html b/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html new file mode 100644 index 0000000..ca116f7 --- /dev/null +++ b/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html @@ -0,0 +1,73 @@ + + + + + +Deploying FastKafka using Docker | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Deploying FastKafka using Docker

Building a Docker Image

To build a Docker image for a FastKafka project, we need the following +items:

  1. A library that is built using FastKafka.
  2. A file in which the requirements are specified. This could be a +requirements.txt file, a setup.py file, or even a wheel file.
  3. A Dockerfile to build an image that will include the two files +mentioned above.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Creating requirements.txt file

The above code only requires fastkafka. So, we will add only +fastkafka to the requirements.txt file, but you can add additional +requirements to it as well.

fastkafka>=0.3.0

Here we are using requirements.txt to store the project’s +dependencies. However, other methods like setup.py, pipenv, and +wheel files can also be used. setup.py is commonly used for +packaging and distributing Python modules, while pipenv is a tool used +for managing virtual environments and package dependencies. wheel +files are built distributions of Python packages that can be installed +with pip.

Creating Dockerfile

# (1)
FROM python:3.9-slim-bullseye
# (2)
WORKDIR /project
# (3)
COPY application.py requirements.txt /project/
# (4)
RUN pip install --no-cache-dir --upgrade -r /project/requirements.txt
# (5)
CMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]
  1. Start from the official Python base image.

  2. Set the current working directory to /project.

    This is where we’ll put the requirements.txt file and the +application.py file.

  3. Copy the application.py file and requirements.txt file inside +the /project directory.

  4. Install the package dependencies in the requirements file.

    The --no-cache-dir option tells pip to not save the downloaded +packages locally, as that is only if pip was going to be run again +to install the same packages, but that’s not the case when working +with containers.

    The --upgrade option tells pip to upgrade the packages if they +are already installed.

  5. Set the command to run the fastkafka run command.

    CMD takes a list of strings, each of these strings is what you +would type in the command line separated by spaces.

    This command will be run from the current working directory, the +same /project directory you set above with WORKDIR /project.

    We supply additional parameters --num-workers and --kafka-broker +for the run command. Finally, we specify the location of our +fastkafka application location as a command argument.

    To learn more about fastkafka run command please check the CLI +docs.

Build the Docker Image

Now that all the files are in place, let’s build the container image.

  1. Go to the project directory (where your Dockerfile is, containing +your application.py file).

  2. Run the following command to build the image:

    docker build -t fastkafka_project_image .

    This command will create a docker image with the name +fastkafka_project_image and the latest tag.

That’s it! You have now built a docker image for your FastKafka project.

Start the Docker Container

Run a container based on the built image:

docker run -d --name fastkafka_project_container fastkafka_project_image

Additional Security

Trivy is an open-source tool that scans Docker images for +vulnerabilities. It can be integrated into your CI/CD pipeline to ensure +that your images are secure and free from known vulnerabilities. Here’s +how you can use trivy to scan your fastkafka_project_image:

  1. Install trivy on your local machine by following the instructions +provided in the official trivy +documentation.

  2. Run the following command to scan your fastkafka_project_image:

    trivy image fastkafka_project_image

    This command will scan your fastkafka_project_image for any +vulnerabilities and provide you with a report of its findings.

  3. Fix any vulnerabilities identified by trivy. You can do this by +updating the vulnerable package to a more secure version or by using +a different package altogether.

  4. Rebuild your fastkafka_project_image and repeat steps 2 and 3 +until trivy reports no vulnerabilities.

By using trivy to scan your Docker images, you can ensure that your +containers are secure and free from known vulnerabilities.

Example repo

A +FastKafka +based library which uses above mentioned Dockerfile to build a docker +image can be found +here

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html b/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html new file mode 100644 index 0000000..81fe59d --- /dev/null +++ b/docs/0.5.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html @@ -0,0 +1,143 @@ + + + + + +Using Redpanda to test FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

Using Redpanda to test FastKafka

What is FastKafka?

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

What is Redpanda?

Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools +work out of the box with Redpanda.

From redpanda.com:

Redpanda is a Kafka®-compatible streaming data platform that is proven +to be 10x faster and 6x lower in total costs. It is also JVM-free, +ZooKeeper®-free, Jepsen-tested and source available.

Some of the advantages of Redpanda over Kafka are

  1. A single binary with built-in everything, no ZooKeeper® or JVM +needed.
  2. Costs up to 6x less than Kafka.
  3. Up to 10x lower average latencies and up to 6x faster Kafka +transactions without compromising correctness.

To learn more about Redpanda, please visit their +website or checkout this blog +post +comparing Redpanda and Kafka’s performance benchmarks.

Example repo

A sample fastkafka-based library that uses Redpanda for testing, based +on this guide, can be found +here.

The process

Here are the steps we’ll be walking through to build our example:

  1. Set up the prerequisites.
  2. Clone the example repo.
  3. Explain how to write an application using FastKafka.
  4. Explain how to write a test case to test FastKafka with Redpanda.
  5. Run the test case and produce/consume messages.

1. Prerequisites

Before starting, make sure you have the following prerequisites set up:

  1. Python 3.x: A Python 3.x installation is required to run +FastKafka. You can download the latest version of Python from the +official website. You’ll also +need to have pip installed and updated, which is Python’s package +installer.
  2. Docker Desktop: Docker is used to run Redpanda, which is +required for testing FastKafka. You can download and install Docker +Desktop from the official +website.
  3. Git: You’ll need to have Git installed to clone the example +repo. You can download Git from the official +website.

2. Cloning and setting up the example repo

To get started with the example code, clone the GitHub +repository by +running the following command in your terminal:

git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git
cd sample_fastkafka_with_redpanda

This will create a new directory called sample_fastkafka_with_redpanda +and download all the necessary files.

Create a virtual environment

Before writing any code, let’s create a new virtual +environment +for our project.

A virtual environment is an isolated environment for a Python project, +which allows you to manage project-specific dependencies and avoid +conflicts between different projects.

To create a new virtual environment, run the following commands in your +terminal:

python3 -m venv venv

This will create a new directory called venv in your project +directory, which will contain the virtual environment.

To activate the virtual environment, run the following command:

source venv/bin/activate

This will change your shell’s prompt to indicate that you are now +working inside the virtual environment.

Finally, run the following command to upgrade pip, the Python package +installer:

pip install --upgrade pip

Install Python dependencies

Next, let’s install the required Python dependencies. In this guide, +we’ll be using +FastKafka +to write our application code and pytest and pytest-asyncio to test +it.

You can install the dependencies from the requirements.txt file +provided in the cloned repository by running:

pip install -r requirements.txt

This will install all the required packages and their dependencies.

3. Writing server code

The application.py file in the cloned repository demonstrates how to +use FastKafka to consume messages from a Kafka topic, make predictions +using a predictive model, and publish the predictions to another Kafka +topic. Here is an explanation of the code:

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call +downloads the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used both +to generate documentation and to later run the server against one of the +given kafka broker.

Next, an instance of the +FastKafka +class is initialized with the minimum required arguments:

  • kafka_brokers: a dictionary used for generating documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, which specifies that this function should produce a message to the "predictions" Kafka topic whenever it is called. The to_predictions function takes a single integer argument species_class representing one of three possible string values predicted by the model. It creates a new IrisPrediction message using this value and then returns it. The framework will call the IrisPrediction.json().encode("utf-8") function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

4. Writing the test code

The service can be tested using the +Tester +instance which can be configured to start a Redpanda +broker for testing +purposes. The test.py file in the cloned repository contains the +following code for testing.

import pytest
from application import IrisInputData, IrisPrediction, kafka_app

from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)


@pytest.mark.asyncio
async def test():
# Start Tester app and create local Redpanda broker for testing
async with Tester(kafka_app).using_local_redpanda(
tag="v23.1.2", listener_port=9092
) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)

The Tester module uses LocalRedpandaBroker to start and stop a Redpanda broker for testing purposes using Docker.

5. Running the tests

We can run the tests, which are in the test.py file, by executing the following command:

pytest test.py

This will start a Redpanda broker using Docker and executes tests. The +output of the command is:

(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest
============================== test session starts ===============================
platform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0
rootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py
plugins: asyncio-0.21.0, anyio-3.6.2
asyncio: mode=strict
collected 1 item

test.py . [100%]

=============================== 1 passed in 7.28s ================================
(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$

Running the tests with the Redpanda broker ensures that your code is +working correctly with a real Kafka-like message broker, making your +tests more reliable.

Recap

We have created an Iris classification model and encapsulated it into our FastKafka application. The app will consume the IrisInputData from the input_data topic and produce the predictions to the predictions topic.

To test the app we have:

  1. Created the app

  2. Started our +Tester +class with Redpanda broker which mirrors the developed app topics +for testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

+ + + + \ No newline at end of file diff --git a/docs/0.5.0/index.html b/docs/0.5.0/index.html new file mode 100644 index 0000000..08a4d4c --- /dev/null +++ b/docs/0.5.0/index.html @@ -0,0 +1,139 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.5.0

FastKafka

Effortless Kafka integration for your web services

PyPI PyPI -
+Downloads PyPI - Python
+Version

GitHub Workflow
+Status +CodeQL +Dependency
+Review

GitHub


FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.


⭐⭐⭐ Stay in touch ⭐⭐⭐

Please show your support and stay in touch by:

Your support helps us to stay in touch with you and encourages us to +continue developing and improving the library. Thank you for your +support!


🐝🐝🐝 We were busy lately 🐝🐝🐝

Activity

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install base version of fastkafka with pip as usual:

pip install fastkafka

To install fastkafka with testing features please use:

pip install fastkafka[test]

To install fastkafka with asyncapi docs please use:

pip install fastkafka[docs]

To install fastkafka with all the features please use:

pip install fastkafka[test,docs]

Tutorial

You can start an interactive tutorial in Google Colab by clicking the +button below:

Open In Colab

Writing server code

Here is an example python script using FastKafka that takes data from a +Kafka topic, makes a prediction using a predictive model, and outputs +the prediction to another Kafka topic.

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call +downloads the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +both generating the documentation and later to run the actual server +against one of the given kafka broker.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, which specifies that this function should produce a message to the "predictions" Kafka topic whenever it is called. The to_predictions function takes a single integer argument species_class representing one of three possible string values predicted by the model. It creates a new IrisPrediction message using this value and then returns it. The framework will call the IrisPrediction.json().encode("utf-8") function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Testing the service

The service can be tested using the +Tester +instances which internally starts InMemory implementation of Kafka +broker.

The Tester will redirect your consumes and produces decorated functions +to the InMemory Kafka broker so that you can quickly test your app +without the need for a running Kafka broker and all its dependencies.

from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)

# Start Tester app and create InMemory Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created an Iris classification model and encapsulated it into our +fastkafka application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

Running the service

The service can be started using the builtin fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

To run the service, you will need a running Kafka broker on localhost as +specified in the kafka_brokers parameter above. We can start the Kafka +broker locally using the +ApacheKafkaBroker.

To use +ApacheKafkaBroker, +you need to install JRE and Kafka to your environment. To simplify this +process, fastkafka comes with a CLI command that does just that, to run +it, in your terminal execute the following:

fastkafka testing install_deps

Now we can run +ApacheKafkaBroker +that will start a Kafka broker instance for us.

from fastkafka.testing import ApacheKafkaBroker

broker = ApacheKafkaBroker(apply_nest_asyncio=True)

broker.start()
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.

'127.0.0.1:9092'

Then, we start the FastKafka service by running the following command in +the folder where the application.py file is located:

fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app

In the above command, we use --num-workers option to specify how many +workers to launch and we use --kafka-broker option to specify which +kafka broker configuration to use from earlier specified kafka_brokers

[801767]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[801765]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[801767]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[801765]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[801767]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[801767]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[801765]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[801765]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[801765]: [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata
[801765]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[801767]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[801767]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[801767]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[801765]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[801767]: [ERROR] aiokafka: Unable to update metadata from [0]
[801765]: [ERROR] aiokafka: Unable to update metadata from [0]
^C
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801765...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801767...
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.

You need to interrupt the running of the cell above by selecting +Runtime->Interrupt execution on the toolbar above.

Finally, we can stop the local Kafka Broker:

broker.stop()
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 801303...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 801303 was already terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 800930...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 800930 was already terminated.
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

AsyncApi requires Node.js to be installed and we provide the following +convenience command line for it:

fastkafka docs install_deps
[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.

This will generate the asyncapi folder in the relative path where all +your documentation will be saved. You can check out the content of it +with:

ls -l asyncapi
total 8
drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 09:14 docs
drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 09:14 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find a asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
^C
Interrupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
)

Kafka_servers

The following documentation snippets are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippets are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages defined as subclasses of BaseModel are +documented as well:

![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)

License

FastKafka is licensed under the Apache License 2.0

A permissive license whose main conditions require preservation of +copyright and license notices. Contributors provide an express grant of +patent rights. Licensed works, modifications, and larger works may be +distributed under different terms and without source code.

The full text of the license can be found +here.

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/CHANGELOG/index.html b/docs/0.6.0/CHANGELOG/index.html new file mode 100644 index 0000000..3723f00 --- /dev/null +++ b/docs/0.6.0/CHANGELOG/index.html @@ -0,0 +1,33 @@ + + + + + +Release notes | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Release notes

0.6.0

New Features

  • Timestamps added to CLI commands (#283), thanks to @davorrunje

  • Added option to process messages concurrently (#278), thanks to @Sternakt

    • A new executor option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.
  • Add consumes and produces functions to app (#274), thanks to @Sternakt

  • Export encoders, decoders from fastkafka.encoder (#246), thanks to @kumaranvpl
  • Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. (#239)
  • UI Improvement: Post screenshots with links to the actual messages in testimonials section (#228)

Bugs Squashed

  • Batch testing fix (#280), thanks to @Sternakt

  • Tester breaks when using Batching or KafkaEvent producers (#279)

  • Consumer loop callbacks are not executing in parallel (#276)

0.5.0

New Features

  • Significant speedup of Kafka producer (#236), thanks to @Sternakt

Bugs Squashed

0.4.0

New Features

0.3.1

  • README.md file updated

0.3.0

New Features

  • Guide for fastkafka produces using partition key (#172), thanks to @Sternakt

    • Closes #161
  • Add support for Redpanda for testing and deployment (#181), thanks to @kumaranvpl

  • Remove bootstrap_servers from init and use the name of broker as an option when running/testing (#134)

  • Add a GH action file to check for broken links in the docs (#163)

  • Optimize requirements for testing and docs (#151)

  • Break requirements into base and optional for testing and dev (#124)

    • Minimize base requirements needed just for running the service.
  • Add link to example git repo into guide for building docs using actions (#81)

  • Add logging for run_in_background (#46)

  • Implement partition Key mechanism for producers (#16)

Bugs Squashed

  • Implement checks for npm installation and version (#176), thanks to @Sternakt

    • Closes #158 by checking if the npx is installed and more verbose error handling
  • Fix the helper.py link in CHANGELOG.md (#165)

  • fastkafka docs install_deps fails (#157)

    • Unexpected internal error: [Errno 2] No such file or directory: 'npx'
  • Broken links in docs (#141)

  • fastkafka run is not showing up in CLI docs (#132)

0.2.3

  • Fixed broken links on PyPi index page

0.2.2

New Features

  • Extract JDK and Kafka installation out of LocalKafkaBroker (#131)

  • PyYAML version relaxed (#119), thanks to @davorrunje

  • Replace docker based kafka with local (#68)

    • replace docker compose with a simple docker run (standard run_jupyter.sh should do)
    • replace all tests to use LocalKafkaBroker
    • update documentation

Bugs Squashed

  • Fix broken link for FastKafka docs in index notebook (#145)

  • Fix encoding issues when loading setup.py on windows OS (#135)

0.2.0

New Features

  • Replace kafka container with LocalKafkaBroker (#112)
      • Replace kafka container with LocalKafkaBroker in tests
  • Remove kafka container from tests environment
  • Fix failing tests

Bugs Squashed

  • Fix random failing in CI (#109)

0.1.3

  • version update in init.py

0.1.2

New Features

  • Git workflow action for publishing Kafka docs (#78)

Bugs Squashed

  • Include missing requirement (#110)
    • Typer is imported in this file but it is not included in settings.ini
    • Add aiohttp which is imported in this file
    • Add nbformat which is imported in _components/helpers.py
    • Add nbconvert which is imported in _components/helpers.py

0.1.1

Bugs Squashed

  • JDK install fails on Python 3.8 (#106)

0.1.0

Initial release

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/CONTRIBUTING/index.html b/docs/0.6.0/CONTRIBUTING/index.html new file mode 100644 index 0000000..07cae2d --- /dev/null +++ b/docs/0.6.0/CONTRIBUTING/index.html @@ -0,0 +1,36 @@ + + + + + +Contributing to fastkafka | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Contributing to fastkafka

First off, thanks for taking the time to contribute! ❤️

All types of contributions are encouraged and valued. See the Table of Contents for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉

And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:

  • Star the project
  • Tweet about it
  • Refer this project in your project's readme
  • Mention the project at local meetups and tell your friends/colleagues

Table of Contents

I Have a Question

If you want to ask a question, we assume that you have read the available Documentation.

Before you ask a question, it is best to search for existing Issues that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue.

If you then still feel the need to ask a question and need clarification, we recommend the following:

  • Contact us on Discord
  • Open an Issue
    • Provide as much context as you can about what you're running into

We will then take care of the issue as soon as possible.

I Want To Contribute

When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.

Reporting Bugs

Before Submitting a Bug Report

A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.

  • Make sure that you are using the latest version.
  • Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the documentation. If you are looking for support, you might want to check this section).
  • To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the bug tracker.
  • Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.
  • Collect information about the bug:
    • Stack trace (Traceback)
    • OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
    • Python version
    • Possibly your input and the output
    • Can you reliably reproduce the issue? And can you also reproduce it with older versions?

How Do I Submit a Good Bug Report?

We use GitHub issues to track bugs and errors. If you run into an issue with the project:

  • Open an Issue. (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)
  • Explain the behavior you would expect and the actual behavior.
  • Please provide as much context as possible and describe the reproduction steps that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
  • Provide the information you collected in the previous section.

Once it's filed:

  • The project team will label the issue accordingly.
  • A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as needs-repro. Bugs with the needs-repro tag will not be addressed until they are reproduced.
  • If the team is able to reproduce the issue, it will be marked needs-fix, as well as possibly other tags (such as critical), and the issue will be left to be implemented.

Suggesting Enhancements

This section guides you through submitting an enhancement suggestion for fastkafka, including completely new features and minor improvements to existing functionality. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.

Before Submitting an Enhancement

  • Make sure that you are using the latest version.
  • Read the documentation carefully and find out if the functionality is already covered, maybe by an individual configuration.
  • Perform a search to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
  • Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library.
  • If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on Discord

How Do I Submit a Good Enhancement Suggestion?

Enhancement suggestions are tracked as GitHub issues.

  • Use a clear and descriptive title for the issue to identify the suggestion.
  • Provide a step-by-step description of the suggested enhancement in as many details as possible.
  • Describe the current behavior and explain which behavior you expected to see instead and why. At this point you can also tell which alternatives do not work for you.
  • Explain why this enhancement would be useful to most fastkafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.

Development

Prepare the dev environment

To start contributing to fastkafka, you first have to prepare the development environment.

Clone the fastkafka repository

To clone the repository, run the following command in the CLI:

git clone https://github.com/airtai/fastkafka.git

Optional: create a virtual python environment

To prevent library version clashes with your other projects, it is recommended that you create a virtual python environment for your fastkafka project by running:

python3 -m venv fastkafka-env

And to activate your virtual environment run:

source fastkafka-env/bin/activate

To learn more about virtual environments, please have a look at official python documentation

Install fastkafka

To install fastkafka, navigate to the root directory of the cloned fastkafka project and run:

pip install -e ".[dev]"

Install JRE and Kafka toolkit

To be able to run tests and use all the functionalities of fastkafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:

  1. Use our fastkafka testing install_deps CLI command which will install JRE and Kafka toolkit for you in your .local folder +OR
  2. Install JRE and Kafka manually. +To do this, please refer to JDK and JRE installation guide and Apache Kafka quickstart

Install npm

To be able to run tests you must have npm installed, because of documentation generation. To do this, you have two options:

  1. Use our fastkafka docs install_deps CLI command which will install npm for you in your .local folder +OR
  2. Install npm manually. +To do this, please refer to NPM installation guide

Install docusaurus

To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of fastkafka project.

Check if everything works

After installing fastkafka and all the necessary dependencies, run nbdev_test in the root of fastkafka project. This will take a couple of minutes as it will run all the tests on fastkafka project. If everything is setup correctly, you will get a "Success." message in your terminal, otherwise please refer to previous steps.

Way of working

The development of fastkafka is done in Jupyter notebooks. Inside the nbs directory you will find all the source code of fastkafka, this is where you will implement your changes.

The testing, cleanup and exporting of the code is being handled by nbdev, please, before starting the work on fastkafka, get familiar with it by reading nbdev documentation.

The general philosophy you should follow when writing code for fastkafka is:

  • Function should be an atomic functionality, short and concise
    • Good rule of thumb: your function should be 5-10 lines long usually
  • If there are more than 2 params, enforce keywording using *
    • E.g.: def function(param1, *, param2, param3): ...
  • Define typing of arguments and return value
    • If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected
  • After the function cell, write test cells using the assert keyword
    • Whenever you implement something you should test that functionality immediately in the cells below
  • Add Google style python docstrings when function is implemented and tested

Before a PR

After you have implemented your changes you will want to open a pull request to merge those changes into our main branch. To make this as painless for you and us, please do the following before opening the request (all the commands are to be run in the root of fastkafka project):

  1. Format your notebooks: nbqa black nbs
  2. Close, shutdown, and clean the metadata from your notebooks: nbdev_clean
  3. Export your code: nbdev_export
  4. Run the tests: nbdev_test
  5. Test code typing: mypy fastkafka
  6. Test code safety with bandit: bandit -r fastkafka
  7. Test code safety with semgrep: semgrep --config auto -r fastkafka

When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everything is in order, we will approve your merge.

Attribution

This guide is based on the contributing-gen. Make your own!

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/LICENSE/index.html b/docs/0.6.0/LICENSE/index.html new file mode 100644 index 0000000..dd1ca37 --- /dev/null +++ b/docs/0.6.0/LICENSE/index.html @@ -0,0 +1,168 @@ + + + + + +LICENSE | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

LICENSE

Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

  1. Definitions.

    "License" shall mean the terms and conditions for use, reproduction, +and distribution as defined by Sections 1 through 9 of this document.

    "Licensor" shall mean the copyright owner or entity authorized by +the copyright owner that is granting the License.

    "Legal Entity" shall mean the union of the acting entity and all +other entities that control, are controlled by, or are under common +control with that entity. For the purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity.

    "You" (or "Your") shall mean an individual or Legal Entity +exercising permissions granted by this License.

    "Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation +source, and configuration files.

    "Object" form shall mean any form resulting from mechanical +transformation or translation of a Source form, including but +not limited to compiled object code, generated documentation, +and conversions to other media types.

    "Work" shall mean the work of authorship, whether in Source or +Object form, made available under the License, as indicated by a +copyright notice that is included in or attached to the work +(an example is provided in the Appendix below).

    "Derivative Works" shall mean any work, whether in Source or Object +form, that is based on (or derived from) the Work and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. For the purposes +of this License, Derivative Works shall not include works that remain +separable from, or merely link (or bind by name) to the interfaces of, +the Work and Derivative Works thereof.

    "Contribution" shall mean any work of authorship, including +the original version of the Work and any modifications or additions +to that Work or Derivative Works thereof, that is intentionally +submitted to Licensor for inclusion in the Work by the copyright owner +or by an individual or Legal Entity authorized to submit on behalf of +the copyright owner. For the purposes of this definition, "submitted" +means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, +and issue tracking systems that are managed by, or on behalf of, the +Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise +designated in writing by the copyright owner as "Not a Contribution."

    "Contributor" shall mean Licensor and any individual or Legal Entity +on behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work.

  2. Grant of Copyright License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the +Work and such Derivative Works in Source or Object form.

  3. Grant of Patent License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, +use, offer to sell, sell, import, and otherwise transfer the Work, +where such license applies only to those patent claims licensable +by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) +with the Work to which such Contribution(s) was submitted. If You +institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work +or a Contribution incorporated within the Work constitutes direct +or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate +as of the date such litigation is filed.

  4. Redistribution. You may reproduce and distribute copies of the +Work or Derivative Works thereof in any medium, with or without +modifications, and in Source or Object form, provided that You +meet the following conditions:

    (a) You must give any other recipients of the Work or +Derivative Works a copy of this License; and

    (b) You must cause any modified files to carry prominent notices +stating that You changed the files; and

    (c) You must retain, in the Source form of any Derivative Works +that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, +excluding those notices that do not pertain to any part of +the Derivative Works; and

    (d) If the Work includes a "NOTICE" text file as part of its +distribution, then any Derivative Works that You distribute must +include a readable copy of the attribution notices contained +within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one +of the following places: within a NOTICE text file distributed +as part of the Derivative Works; within the Source form or +documentation, if provided along with the Derivative Works; or, +within a display generated by the Derivative Works, if and +wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and +do not modify the License. You may add Your own attribution +notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided +that such additional attribution notices cannot be construed +as modifying the License.

    You may add Your own copyright statement to Your modifications and +may provide additional or different license terms and conditions +for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, +reproduction, and distribution of the Work otherwise complies with +the conditions stated in this License.

  5. Submission of Contributions. Unless You explicitly state otherwise, +any Contribution intentionally submitted for inclusion in the Work +by You to the Licensor shall be under the terms and conditions of +this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify +the terms of any separate license agreement you may have executed +with Licensor regarding such Contributions.

  6. Trademarks. This License does not grant permission to use the trade +names, trademarks, service marks, or product names of the Licensor, +except as required for reasonable and customary use in describing the +origin of the Work and reproducing the content of the NOTICE file.

  7. Disclaimer of Warranty. Unless required by applicable law or +agreed to in writing, Licensor provides the Work (and each +Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions +of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any +risks associated with Your exercise of permissions under this License.

  8. Limitation of Liability. In no event and under no legal theory, +whether in tort (including negligence), contract, or otherwise, +unless required by applicable law (such as deliberate and grossly +negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a +result of this License or out of the use or inability to use the +Work (including but not limited to damages for loss of goodwill, +work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses), even if such Contributor +has been advised of the possibility of such damages.

  9. Accepting Warranty or Additional Liability. While redistributing +the Work or Derivative Works thereof, You may choose to offer, +and charge a fee for, acceptance of support, warranty, indemnity, +or other liability obligations and/or rights consistent with this +License. However, in accepting such obligations, You may act only +on Your own behalf and on Your sole responsibility, not on behalf +of any other Contributor, and only if You agree to indemnify, +defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason +of your accepting any such warranty or additional liability.

    END OF TERMS AND CONDITIONS

    APPENDIX: How to apply the Apache License to your work.

    To apply the Apache License to your work, attach the following +boilerplate notice, with the fields enclosed by brackets "[]" +replaced with your own identifying information. (Don't include +the brackets!) The text should be enclosed in the appropriate +comment syntax for the file format. We also recommend that a +file or class name and description of purpose be included on the +same "printed page" as the copyright notice for easier +identification within third-party archives.

    Copyright [yyyy] [name of copyright owner]

    Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/EventMetadata/index.html b/docs/0.6.0/api/fastkafka/EventMetadata/index.html new file mode 100644 index 0000000..9d074cf --- /dev/null +++ b/docs/0.6.0/api/fastkafka/EventMetadata/index.html @@ -0,0 +1,32 @@ + + + + + +EventMetadata | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

EventMetadata

fastkafka.EventMetadata

A class for encapsulating Kafka record metadata.

Parameters:

  • topic: The topic this record is received from
  • partition: The partition from which this record is received
  • offset: The position of this record in the corresponding Kafka partition
  • timestamp: The timestamp of this record
  • timestamp_type: The timestamp type of this record
  • key: The key (or None if no key is specified)
  • value: The value
  • serialized_key_size: The size of the serialized, uncompressed key in bytes
  • serialized_value_size: The size of the serialized, uncompressed value in bytes
  • headers: The headers
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/KafkaEvent/index.html b/docs/0.6.0/api/fastkafka/KafkaEvent/index.html new file mode 100644 index 0000000..5c20717 --- /dev/null +++ b/docs/0.6.0/api/fastkafka/KafkaEvent/index.html @@ -0,0 +1,32 @@ + + + + + +KafkaEvent | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

KafkaEvent

fastkafka.KafkaEvent

A generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel

Parameters:

  • message: The message contained in the Kafka event, can be of type pydantic.BaseModel.
  • key: The optional key used to identify the Kafka event.
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/encoder/AvroBase/index.html b/docs/0.6.0/api/fastkafka/encoder/AvroBase/index.html new file mode 100644 index 0000000..c0004d9 --- /dev/null +++ b/docs/0.6.0/api/fastkafka/encoder/AvroBase/index.html @@ -0,0 +1,32 @@ + + + + + +AvroBase | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

AvroBase

fastkafka.encoder.AvroBase

This is a base pydantic class that will add some methods

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/encoder/avro_decoder/index.html b/docs/0.6.0/api/fastkafka/encoder/avro_decoder/index.html new file mode 100644 index 0000000..7249b84 --- /dev/null +++ b/docs/0.6.0/api/fastkafka/encoder/avro_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_decoder | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

avro_decoder

fastkafka.encoder.avro_decoder

avro_decoder

def avro_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any

Decoder to decode avro encoded messages to pydantic model instance

Parameters:

  • raw_msg: Avro encoded bytes message received from Kafka topic
  • cls: Pydantic class; This pydantic class will be used to construct instance of same class

Returns:

  • An instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/encoder/avro_encoder/index.html b/docs/0.6.0/api/fastkafka/encoder/avro_encoder/index.html new file mode 100644 index 0000000..982701d --- /dev/null +++ b/docs/0.6.0/api/fastkafka/encoder/avro_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_encoder | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

avro_encoder

fastkafka.encoder.avro_encoder

avro_encoder

def avro_encoder(msg: pydantic.main.BaseModel) -> bytes

Encoder to encode pydantic instances to avro message

Parameters:

  • msg: An instance of pydantic basemodel

Returns:

  • A bytes message which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/index.html b/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/index.html new file mode 100644 index 0000000..1859617 --- /dev/null +++ b/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/index.html @@ -0,0 +1,32 @@ + + + + + +avsc_to_pydantic | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

avsc_to_pydantic

fastkafka.encoder.avsc_to_pydantic

avsc_to_pydantic

def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass

Generate pydantic model from given Avro Schema

Parameters:

  • schema: Avro schema in dictionary format

Returns:

  • Pydantic model class built from given avro schema
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/encoder/json_decoder/index.html b/docs/0.6.0/api/fastkafka/encoder/json_decoder/index.html new file mode 100644 index 0000000..4f2cdbb --- /dev/null +++ b/docs/0.6.0/api/fastkafka/encoder/json_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_decoder | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

json_decoder

fastkafka.encoder.json_decoder

json_decoder

def json_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any

Decoder to decode json string in bytes to pydantic model instance

Parameters:

  • raw_msg: Bytes message received from Kafka topic
  • cls: Pydantic class; This pydantic class will be used to construct instance of same class

Returns:

  • An instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/encoder/json_encoder/index.html b/docs/0.6.0/api/fastkafka/encoder/json_encoder/index.html new file mode 100644 index 0000000..5cc0fb5 --- /dev/null +++ b/docs/0.6.0/api/fastkafka/encoder/json_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_encoder | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

json_encoder

fastkafka.encoder.json_encoder

json_encoder

def json_encoder(msg: pydantic.main.BaseModel) -> bytes

Encoder to encode pydantic instances to json string

Parameters:

  • msg: An instance of pydantic basemodel

Returns:

  • Json string in bytes which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor/index.html b/docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor/index.html new file mode 100644 index 0000000..1cf7de0 --- /dev/null +++ b/docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor/index.html @@ -0,0 +1,35 @@ + + + + + +DynamicTaskExecutor | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

DynamicTaskExecutor

fastkafka.executors.DynamicTaskExecutor

A class that implements a dynamic task executor for processing consumer records.

The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality +for running tasks in parallel using asyncio.Task.

__init__

def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000, size: int = 100000) -> None

Create an instance of DynamicTaskExecutor

Parameters:

  • throw_exceptions: Flag indicating whether exceptions should be thrown or logged. +Defaults to False.
  • max_buffer_size: Maximum buffer size for the memory object stream. +Defaults to 100_000.
  • size: Size of the task pool. Defaults to 100_000.

run

def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None

Runs the dynamic task executor.

Parameters:

  • is_shutting_down_f: Function to check if the executor is shutting down.
  • generator: Generator function for retrieving consumer records.
  • processor: Processor function for processing consumer records.

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/executors/SequentialExecutor/index.html b/docs/0.6.0/api/fastkafka/executors/SequentialExecutor/index.html new file mode 100644 index 0000000..7c29b4e --- /dev/null +++ b/docs/0.6.0/api/fastkafka/executors/SequentialExecutor/index.html @@ -0,0 +1,35 @@ + + + + + +SequentialExecutor | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

SequentialExecutor

fastkafka.executors.SequentialExecutor

A class that implements a sequential executor for processing consumer records.

The SequentialExecutor class extends the StreamExecutor class and provides functionality +for running processing tasks in sequence by awaiting their coroutines.

__init__

def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000) -> None

Create an instance of SequentialExecutor

Parameters:

  • throw_exceptions: Flag indicating whether exceptions should be thrown or logged. +Defaults to False.
  • max_buffer_size: Maximum buffer size for the memory object stream. +Defaults to 100_000.

run

def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None

Runs the sequential executor.

Parameters:

  • is_shutting_down_f: Function to check if the executor is shutting down.
  • generator: Generator function for retrieving consumer records.
  • processor: Processor function for processing consumer records.

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/index.html b/docs/0.6.0/api/fastkafka/index.html new file mode 100644 index 0000000..39f4bb4 --- /dev/null +++ b/docs/0.6.0/api/fastkafka/index.html @@ -0,0 +1,475 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

FastKafka

fastkafka.FastKafka

__init__

def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Dict[str, Any], root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]] = None, loop=None, client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fa3e2864f70>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fa3e1879090>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None

Creates FastKafka application

Parameters:

  • title: optional title for the documentation. If None, +the title will be set to empty string
  • description: optional description for the documentation. If +None, the description will be set to empty string
  • version: optional version for the documentation. If None, +the version will be set to empty string
  • contact: optional contact for the documentation. If None, the +contact will be set to placeholder values: +name='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'
  • kafka_brokers: dictionary describing kafka brokers used for +generating documentation
  • root_path: path to where documentation will be created
  • lifespan: asynccontextmanager that is used for setting lifespan hooks. +aenter is called before app start and aexit after app stop. +The lifespan is called whe application is started as async context +manager, e.g.:async with kafka_app...
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT. +Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence acks to set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalacing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no hearts are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

benchmark

def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]

Decorator to benchmark produces/consumes functions

Parameters:

  • interval: Period to use to calculate throughput. If value is of type int, +then it will be used as seconds. If value is of type timedelta, +then it will be used as it is. default: 1 - one second
  • sliding_window_size: The size of the sliding window to use to calculate +average throughput. default: None - By default average throughput is +not calculated

consumes

def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]], typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]]

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the consumer will subscribe to and execute the +decorated function when it receives a message from the topic, +default: None. If the topic is not specified, topic name will be +inferred from the decorated function name by stripping the defined prefix
  • decoder: Decoder to use to decode messages consumed from the topic, +default: json - By default, it uses json decoder to decode +bytes to json string and then it creates instance of pydantic +BaseModel. It also accepts custom decoder function.
  • executor: Type of executor to choose for consuming tasks. Available options +are "SequentialExecutor" and "DynamicTaskExecutor". The default option is +"SequentialExecutor" which will execute the consuming tasks sequentially. +If the consuming tasks have high latency it is recommended to use +"DynamicTaskExecutor" which will wrap the consuming functions into tasks +and run them on an asyncio loop in the background. This comes with a cost of +increased overhead so use it only in cases when your consume functions have +high latency such as database queries or some other type of networking.
  • prefix: Prefix stripped from the decorated function to define a topic name +if the topic argument is not passed, default: "on_". If the decorated +function name is not prefixed with the defined prefix and topic argument +is not passed, then this method will throw ValueError
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • bootstrap_servers: a host[:port] string (or list of +host[:port] strings) that the consumer should contact to bootstrap +initial cluster metadata.

This does not have to be the full node list. +It just needs to have at least one broker that will respond to a +Metadata API Request. Default port is 9092. If no servers are +specified, will default to localhost:9092.

  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. Also +submitted to :class:~.consumer.group_coordinator.GroupCoordinator +for logging with respect to consumer group administration. Default: +aiokafka-{version}
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • request_timeout_ms: Client request timeout in milliseconds. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalancing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • api_version: specify which kafka API version to use. +:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more information see +:ref:ssl_auth. Default: None.
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying None will +disable idle checks. Default: 540000 (9 minutes).
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: +PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: None
  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

Returns:

  • : A function returning the same function

create_mocks

def create_mocks(self: fastkafka.FastKafka) -> None

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

produces

def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fa3e2864f70>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fa3e1879090>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the producer will send returned values from +the decorated function to, default: None- If the topic is not +specified, topic name will be inferred from the decorated function +name by stripping the defined prefix.
  • encoder: Encoder to use to encode messages before sending it to topic, +default: json - By default, it uses json encoder to convert +pydantic basemodel to json string and then encodes the string to bytes +using 'utf-8' encoding. It also accepts custom encoder function.
  • prefix: Prefix stripped from the decorated function to define a topic +name if the topic argument is not passed, default: "to_". If the +decorated function name is not prefixed with the defined prefix +and topic argument is not passed, then this method will throw ValueError
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT. +Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence requires acks to be set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None

Returns:

  • : A function returning the same function

Exceptions:

  • ValueError: when needed

run_in_background

def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

  • A decorator function that takes a background task as an input and stores it to be run in the background.
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/index.html b/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/index.html new file mode 100644 index 0000000..8b99609 --- /dev/null +++ b/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/index.html @@ -0,0 +1,32 @@ + + + + + +ApacheKafkaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

ApacheKafkaBroker

fastkafka.testing.ApacheKafkaBroker

ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.

__init__

def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None

Initialises the ApacheKafkaBroker object

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

start

def start(self: fastkafka.testing.ApacheKafkaBroker) -> str

Starts a local kafka broker and zookeeper instance synchronously

Returns:

  • Kafka broker bootstrap server address in string format: addr:port

stop

def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None

Stops a local kafka broker and zookeeper instance synchronously

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker/index.html b/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker/index.html new file mode 100644 index 0000000..00a55ad --- /dev/null +++ b/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker/index.html @@ -0,0 +1,32 @@ + + + + + +LocalRedpandaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

LocalRedpandaBroker

fastkafka.testing.LocalRedpandaBroker

LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.

__init__

def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None

Initialises the LocalRedpandaBroker object

Parameters:

  • listener_port: Port on which the clients (producers and consumers) can connect
  • tag: Tag of Redpanda image to use to start container
  • seastar_core: Core(s) to use by Seastar (the framework Redpanda uses under the hood)
  • memory: The amount of memory to make available to Redpanda
  • mode: Mode to use to load configuration properties in container
  • default_log_level: Log levels to use for Redpanda

get_service_config_string

def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str

Generates a configuration for a service

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • service: "redpanda", defines which service to get config string for

start

def start(self: fastkafka.testing.LocalRedpandaBroker) -> str

Starts a local redpanda broker instance synchronously

Returns:

  • Redpanda broker bootstrap server address in string format: addr:port

stop

def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None

Stops a local redpanda broker instance synchronously

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/api/fastkafka/testing/Tester/index.html b/docs/0.6.0/api/fastkafka/testing/Tester/index.html new file mode 100644 index 0000000..00d435f --- /dev/null +++ b/docs/0.6.0/api/fastkafka/testing/Tester/index.html @@ -0,0 +1,281 @@ + + + + + +Tester | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Tester

fastkafka.testing.Tester

__init__

def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None

Mirror-like object for testing a FastKafka application

Can be used as context manager

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

benchmark

def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]

Decorator to benchmark produces/consumes functions

Parameters:

  • interval: Period to use to calculate throughput. If value is of type int, +then it will be used as seconds. If value is of type timedelta, +then it will be used as it is. default: 1 - one second
  • sliding_window_size: The size of the sliding window to use to calculate +average throughput. default: None - By default average throughput is +not calculated

consumes

def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, prefix: str = 'on_', loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]], typing.Union[typing.Callable[[pydantic.main.BaseModel], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], typing.Awaitable[NoneType]], typing.Callable[[pydantic.main.BaseModel], NoneType], typing.Callable[[pydantic.main.BaseModel, fastkafka.EventMetadata], NoneType]]]

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the consumer will subscribe to and execute the +decorated function when it receives a message from the topic, +default: None. If the topic is not specified, topic name will be +inferred from the decorated function name by stripping the defined prefix
  • decoder: Decoder to use to decode messages consumed from the topic, +default: json - By default, it uses json decoder to decode +bytes to json string and then it creates instance of pydantic +BaseModel. It also accepts custom decoder function.
  • executor: Type of executor to choose for consuming tasks. Available options +are "SequentialExecutor" and "DynamicTaskExecutor". The default option is +"SequentialExecutor" which will execute the consuming tasks sequentially. +If the consuming tasks have high latency it is recommended to use +"DynamicTaskExecutor" which will wrap the consuming functions into tasks +and run them on an asyncio loop in the background. This comes with a cost of +increased overhead so use it only in cases when your consume functions have +high latency such as database queries or some other type of networking.
  • prefix: Prefix stripped from the decorated function to define a topic name +if the topic argument is not passed, default: "on_". If the decorated +function name is not prefixed with the defined prefix and topic argument +is not passed, then this method will throw ValueError
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • bootstrap_servers: a host[:port] string (or list of +host[:port] strings) that the consumer should contact to bootstrap +initial cluster metadata.

This does not have to be the full node list. +It just needs to have at least one broker that will respond to a +Metadata API Request. Default port is 9092. If no servers are +specified, will default to localhost:9092.

  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. Also +submitted to :class:~.consumer.group_coordinator.GroupCoordinator +for logging with respect to consumer group administration. Default: +aiokafka-{version}
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • request_timeout_ms: Client request timeout in milliseconds. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalancing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • api_version: specify which kafka API version to use. +:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more information see +:ref:ssl_auth. Default: None.
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying None will +disable idle checks. Default: 540000 (9 minutes).
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: +PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: None
  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

Returns:

  • : A function returning the same function

create_mocks

def create_mocks(self: fastkafka.FastKafka) -> None

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

produces

def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fa3e2864f70>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fa3e1879090>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the producer will send returned values from +the decorated function to, default: None- If the topic is not +specified, topic name will be inferred from the decorated function +name by stripping the defined prefix.
  • encoder: Encoder to use to encode messages before sending it to topic, +default: json - By default, it uses json encoder to convert +pydantic basemodel to json string and then encodes the string to bytes +using 'utf-8' encoding. It also accepts custom encoder function.
  • prefix: Prefix stripped from the decorated function to define a topic +name if the topic argument is not passed, default: "to_". If the +decorated function name is not prefixed with the defined prefix +and topic argument is not passed, then this method will throw ValueError
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT. +Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence requires acks to be set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None

Returns:

  • : A function returning the same function

Exceptions:

  • ValueError: when needed

run_in_background

def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

  • A decorator function that takes a background task as an input and stores it to be run in the background.

using_local_kafka

def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester

Starts local Kafka broker used by the Tester instance

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

Returns:

  • An instance of tester with Kafka as broker

using_local_redpanda

def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester

Starts local Redpanda broker used by the Tester instance

Parameters:

  • listener_port: Port on which the clients (producers and consumers) can connect
  • tag: Tag of Redpanda image to use to start container
  • seastar_core: Core(s) to use by Seastar (the framework Redpanda uses under the hood)
  • memory: The amount of memory to make available to Redpanda
  • mode: Mode to use to load configuration properties in container
  • default_log_level: Log levels to use for Redpanda

Returns:

  • An instance of tester with Redpanda as broker
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/cli/fastkafka/index.html b/docs/0.6.0/cli/fastkafka/index.html new file mode 100644 index 0000000..b9d70fb --- /dev/null +++ b/docs/0.6.0/cli/fastkafka/index.html @@ -0,0 +1,32 @@ + + + + + +fastkafka | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

fastkafka

Usage:

$ fastkafka [OPTIONS] COMMAND [ARGS]...

Options:

  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.

Commands:

  • docs: Commands for managing fastkafka app...
  • run: Runs Fast Kafka API application
  • testing: Commands for managing fastkafka testing

fastkafka docs

Commands for managing fastkafka app documentation

Usage:

$ fastkafka docs [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • generate: Generates documentation for a FastKafka...
  • install_deps: Installs dependencies for FastKafka...
  • serve: Generates and serves documentation for a...

fastkafka docs generate

Generates documentation for a FastKafka application

Usage:

$ fastkafka docs generate [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created [default: .]
  • --help: Show this message and exit.

fastkafka docs install_deps

Installs dependencies for FastKafka documentation generation

Usage:

$ fastkafka docs install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.

fastkafka docs serve

Generates and serves documentation for a FastKafka application

Usage:

$ fastkafka docs serve [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created [default: .]
  • --bind TEXT: Some info [default: 127.0.0.1]
  • --port INTEGER: Some info [default: 8000]
  • --help: Show this message and exit.

fastkafka run

Runs Fast Kafka API application

Usage:

$ fastkafka run [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --num-workers INTEGER: Number of FastKafka instances to run, defaults to number of CPU cores. [default: 64]
  • --kafka-broker TEXT: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [required]
  • --help: Show this message and exit.

fastkafka testing

Commands for managing fastkafka testing

Usage:

$ fastkafka testing [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • install_deps: Installs dependencies for FastKafka app...

fastkafka testing install_deps

Installs dependencies for FastKafka app testing

Usage:

$ fastkafka testing install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/cli/run_fastkafka_server_process/index.html b/docs/0.6.0/cli/run_fastkafka_server_process/index.html new file mode 100644 index 0000000..933c31e --- /dev/null +++ b/docs/0.6.0/cli/run_fastkafka_server_process/index.html @@ -0,0 +1,32 @@ + + + + + +run_fastkafka_server_process | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

run_fastkafka_server_process

Usage:

$ run_fastkafka_server_process [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --kafka-broker TEXT: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [required]
  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_00_FastKafka_Demo/index.html b/docs/0.6.0/guides/Guide_00_FastKafka_Demo/index.html new file mode 100644 index 0000000..7746c6b --- /dev/null +++ b/docs/0.6.0/guides/Guide_00_FastKafka_Demo/index.html @@ -0,0 +1,122 @@ + + + + + +FastKafka tutorial | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

FastKafka tutorial

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install it with pip as usual:

pip install fastkafka
try:
import fastkafka
except:
! pip install fastkafka

Running in Colab

You can start this interactive tutorial in Google Colab by clicking the +button below:

Open In Colab

Writing server code

Here is an example python script using FastKafka that takes data from a +Kafka topic, makes a prediction using a predictive model, and outputs +the prediction to another Kafka topic.

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call downloads +the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
    """Input features for the Iris predictor; all measurements are in cm."""

    # NonNegativeFloat rejects values below zero at parse/validation time.
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    """Output message of the predictive model."""

    species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +generating the documentation only and it is not being checked by the +actual server.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation
from fastkafka import FastKafka

# Broker catalogue; per the surrounding guide it is used only for
# documentation generation and is not validated by the running server.
kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

# Minimal application: title, broker metadata and the model lifespan hook.
kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
    # Called for every message on the "input_data" topic; Pydantic has
    # already parsed the raw JSON into an IrisInputData instance.
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    # Forward the predicted class index to the "predictions" topic.
    to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
    # Map the numeric class (0..2) to its species name; the framework
    # JSON-encodes the returned message and produces it to "predictions".
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

Testing the service

The service can be tested using the +Tester +instances which internally starts Kafka broker and zookeeper.

Before running tests, we have to install Java runtime and Apache Kafka +locally. To simplify the process, we provide the following convenience +command:

fastkafka testing install_deps
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
from fastkafka.testing import Tester

# Sample message used to drive the service under test.
msg = IrisInputData(
    sepal_length=0.1,
    sepal_width=0.2,
    petal_length=0.3,
    petal_width=0.4,
)

# Start Tester app and create local Kafka broker for testing
# (top-level await: this snippet runs inside a notebook event loop).
async with Tester(kafka_app) as tester:
    # Send IrisInputData message to input_data topic
    await tester.to_input_data(msg)

    # Assert that the kafka_app responded with IrisPrediction in predictions topic
    await tester.awaited_mocks.on_predictions.assert_awaited_with(
        IrisPrediction(species="setosa"), timeout=2
    )
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created an Iris classification model and encapsulated it into our +fastkafka application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

Running the service

The service can be started using the built-in fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from pydantic import BaseModel, Field, NonNegativeFloat

# NOTE: the original snippet imported FastKafka twice; the duplicate
# import has been removed and all imports consolidated at the top.
from fastkafka import FastKafka

# Registry holding the trained model between app startup and shutdown.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
    """Train the Iris classifier just before the app starts and release it on shutdown."""
    # Load the ML model
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
        X, y
    )
    yield
    # Clean up the ML models and release the resources
    ml_models.clear()


class IrisInputData(BaseModel):
    """Input features for the Iris predictor; all measurements are in cm."""

    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    """Output message of the predictive model."""

    species: str = Field(..., example="setosa", description="Predicted species")


# Broker catalogue; used for documentation generation only.
kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)


@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
    # Predict the class for the consumed measurements and forward it.
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
    # Map the numeric class (0..2) to its species name; the framework
    # JSON-encodes the returned message and produces it to "predictions".
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

To run the service, you will need a running Kafka broker on localhost as +specified in the kafka_brokers parameter above. We can start the Kafka +broker locally using the +ApacheKafkaBroker. +Notice that the same happens automatically in the +Tester +as shown above.

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.

'127.0.0.1:9092'

Then, we start the FastKafka service by running the following command in +the folder where the application.py file is located:

fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app

In the above command, we use --num-workers option to specify how many +workers to launch and we use --kafka-broker option to specify which +kafka broker configuration to use from earlier specified kafka_brokers

[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]
[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]
^C
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...

You need to interrupt running of the cell above by selecting +Runtime->Interrupt execution on the toolbar above.

Finally, we can stop the local Kafka Broker:

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

When running in Colab, we need to update Node.js first:

We need to install all dependencies for the generator using the +following command line:

fastkafka docs install_deps
[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.

. This will generate the asyncapi folder in relative path where all +your documentation will be saved. You can check out the content of it +with:

ls -l asyncapi
total 8
drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs
drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find a asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
^C
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

# Broker catalogue driving the generated AsyncAPI documentation below.
kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    bootstrap_servers="localhost:9092",
)

Kafka_servers

The following documentation snippet are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippet are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages as defined as subclasses of BaseModel are +documented as well:

![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_01_Intro/index.html b/docs/0.6.0/guides/Guide_01_Intro/index.html new file mode 100644 index 0000000..80556a8 --- /dev/null +++ b/docs/0.6.0/guides/Guide_01_Intro/index.html @@ -0,0 +1,51 @@ + + + + + +Intro | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Intro

This tutorial will show you how to use FastKafkaAPI, step by +step.

The goal of FastKafkaAPI is to simplify the use of Apache Kafka in +Python inspired by FastAPI look and feel.

In this Intro tutorial we’ll go through the basic requirements to run the +demos presented in future steps.

Installing FastKafkaAPI

First step is to install FastKafkaAPI

$ pip install fastkafka

Preparing a Kafka broker

Next step is to prepare the Kafka environment, our consumers and +producers will need some channel of communication.

!!! info "Hey, your first info!"

If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. 

To go through the tutorial, we recommend that you use dockerized Kafka +brokers, if you have Docker and docker-compose installed the setup +should take you no time (if we exclude the container download times).

!!! warning "Listen! This is important."

To be able to setup this configuration you need to have Docker and docker-compose installed

See here for more info on <a href = \"https://docs.docker.com/\" target=\"_blank\">Docker</a> and <a href = \"https://docs.docker.com/compose/install/\" target=\"_blank\">docker compose</a>

To setup the recommended environment, first, create a new folder where +you want to save your demo files (e.g. fastkafka_demo). Inside the new +folder create a new YAML file named kafka_demo.yml and copy the +following configuration into it:

version: "3"
services:
  zookeeper:
    image: wurstmeister/zookeeper
    hostname: zookeeper
    container_name: zookeeper
    networks:
      - fastkafka-network
    ports:
      - "2181:2181"
      - "22:22"
      - "2888:2888"
      - "3888:3888"
  kafka:
    image: wurstmeister/kafka
    container_name: kafka
    ports:
      # Host access goes through the INSIDE listener on 9093.
      - "9093:9093"
    environment:
      # Resolves the Docker host's name for the INTER listener address.
      HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d' ' -f 2"
      KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093
      KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093
      KAFKA_INTER_BROKER_LISTENER_NAME: INTER
      # Pre-create the demo topic: name:partitions:replication-factor.
      KAFKA_CREATE_TOPICS: "hello:1:1"
    volumes:
      # Needed so HOSTNAME_COMMAND can query the Docker daemon.
      - /var/run/docker.sock:/var/run/docker.sock
    depends_on:
      - zookeeper
    healthcheck:
      test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]
      interval: 5s
      timeout: 10s
      retries: 5
    networks:
      - fastkafka-network
networks:
  fastkafka-network:
    name: "fastkafka-network"

This configuration will start a single instance of Zookeeper, single +instance of Kafka broker and create a ‘hello’ topic (quite enough for a +start). To start the configuration, run:

$ docker-compose -f kafka_demo.yml up -d --wait

This will start the necessary containers and wait till they report that +they are Healthy. After the command finishes, you are good to go to try +out the FastKafkaAPI capabilities! 🎊

Running the code

After installing FastKafkaAPI and initialising the Kafka broker you can +proceed to the ‘First Steps’ part of the tutorial. There, you will write +your first Kafka client and producer apps, run them, and interact with +them.

You are highly encouraged to follow along the tutorials not just by +reading through them but by implementing the code examples in your own +environment. This will not only help you remember the use cases better +but also, hopefully, demonstrate to you the ease of use of this library.

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_02_First_Steps/index.html b/docs/0.6.0/guides/Guide_02_First_Steps/index.html new file mode 100644 index 0000000..f048761 --- /dev/null +++ b/docs/0.6.0/guides/Guide_02_First_Steps/index.html @@ -0,0 +1,49 @@ + + + + + +First Steps | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

First Steps

Creating a simple Kafka consumer app

For our first demo we will create the simplest possible Kafka consumer +and run it using ‘fastkafka run’ command.

The consumer will:

  1. Connect to the Kafka Broker we setup in the Intro guide

  2. Listen to the hello topic

  3. Write any message received from the hello topic to stdout

To create the consumer, first, create a file named

hello_kafka_consumer.py and copy the following code to it:

from os import environ

from fastkafka import FastKafka
from pydantic import BaseModel, Field

# Broker location is taken from the environment; export KAFKA_HOSTNAME
# and KAFKA_PORT before running this script.
kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
    "localhost": {
        "description": "local development kafka",
        "url": kafka_server_url,
        "port": kafka_server_port
    }
}

class HelloKafkaMsg(BaseModel):
    """Schema of the JSON messages consumed from the "hello" topic."""

    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )

kafka_app = FastKafka(
    kafka_brokers=kafka_brokers
)

@kafka_app.consumes()
async def on_hello(msg: HelloKafkaMsg):
    # flush=True so worker output appears in real time under `fastkafka run`.
    print(f"Got data, msg={msg.msg}", flush=True)

!!! info "Kafka configuration"

    This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

!!! warning "Remember to flush"

Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.

To run this consumer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app

After running the command, you should see something similar to the output +below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})
[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.

Now you can interact with your consumer, by sending the messages to the +subscribed ‘hello’ topic, don’t worry, we will cover this in the next +step of this guide.

Sending first message to your consumer

After we have created and run our first consumer, we should send a +message to it, to make sure it is working properly.

If you are using the Kafka setup as described in the Intro guide, you +can follow the steps listed here to send a message to the hello topic.

First, connect to your running kafka broker by running:

docker exec -it kafka /bin/bash

Then, when connected to the container, run:

kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello

This will open an interactive connection to the hello topic, now you can +write your messages to the topic and they will be consumed by our +consumer.

In the shell, type:

{"msg":"hello"}

and press enter. This will send a hello message to the topic which will +be read by our running consumer and output to stdout.

Check the output of your consumer (terminal where you ran the ‘fastkafka +run’ command) and confirm that your consumer has read the Kafka message. +You should see something like this:

Got data, msg=hello

Creating a hello Kafka producer

Consuming messages is only a part of this Library functionality, the +other big part is producing the messages. So, let’s create our first +kafka producer which will send it’s greetings to our consumer +periodically.

The producer will:

  1. Connect to the Kafka Broker we setup in the Intro guide
  2. Connect to the hello topic
  3. Periodically send a message to the hello world topic

To create the producer, first, create a file named

hello_kafka_producer.py and copy the following code to it:

from os import environ

import asyncio
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

# Broker location is taken from the environment; export KAFKA_HOSTNAME
# and KAFKA_PORT before running this script.
kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
    "localhost": {
        "description": "local development kafka",
        "url": kafka_server_url,
        "port": kafka_server_port
    }
}

class HelloKafkaMsg(BaseModel):
    """Schema of the JSON messages produced to the "hello" topic."""

    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )

kafka_app = FastKafka(
    kafka_brokers=kafka_brokers
)

logger = get_logger(__name__)

@kafka_app.produces()
async def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:
    # The returned message is JSON-encoded by the framework and produced
    # to the "hello" topic.
    logger.info(f"Producing: {msg}")
    return msg

@kafka_app.run_in_background()
async def hello_every_second():
    # Produce one greeting per second until the worker is stopped.
    # (idiom fix: `while True:` instead of the C-style `while(True):`)
    while True:
        await to_hello(HelloKafkaMsg(msg="hello"))
        await asyncio.sleep(1)

!!! info "Kafka configuration"

This producer script uses the KAFKA_HOSTNAME and KAFKA_PORT environment variables, so make sure that you have exported them into your environment before running the following command (e.g. in a shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

To run this producer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app

After running the command, you should see something similar to the output below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.

Now, while the producer is running, it will send a HelloKafkaMsg every +second to the hello kafka topic. If your consumer is still running, you +should see the messages appear in its log.

Recap

In this guide we have:

  1. Created a simple Kafka consumer using FastKafka
  2. Sent a message to our consumer through Kafka
  3. Created a simple Kafka producer using FastKafka
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_03_Authentication/index.html b/docs/0.6.0/guides/Guide_03_Authentication/index.html new file mode 100644 index 0000000..27c2351 --- /dev/null +++ b/docs/0.6.0/guides/Guide_03_Authentication/index.html @@ -0,0 +1,37 @@ + + + + + +Authentication | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Authentication

TLS Authentication

sasl_mechanism (str) – Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN, +GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN

sasl_plain_username (str) – username for SASL PLAIN authentication. +Default: None

sasl_plain_password (str) – password for SASL PLAIN authentication. +Default: None

sasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token +provider instance. (See kafka.oauth.abstract). Default: None

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow/index.html b/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow/index.html new file mode 100644 index 0000000..ea1e528 --- /dev/null +++ b/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow/index.html @@ -0,0 +1,42 @@ + + + + + +Deploy FastKafka docs to GitHub Pages | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Deploy FastKafka docs to GitHub Pages

Getting started

Add your workflow file .github/workflows/fastkafka_docs_deploy.yml and +push it to your remote default branch.

Here is an example workflow:

name: Deploy FastKafka Generated Documentation to GitHub Pages

on:
push:
branches: [ "main", "master" ]
workflow_dispatch:

jobs:
deploy:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

Options

Set app location

Input in the form of path:app, where path is the path to a Python +file and app is an object of type +FastKafka:

- name: Deploy
uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

In the above example, +FastKafka +app is named as kafka_app and it is available in the application +submodule of the test_fastkafka module.

Example Repository

A FastKafka-based library that uses the above-mentioned workflow actions to publish FastKafka docs to GitHub Pages can be found here.

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_05_Lifespan_Handler/index.html b/docs/0.6.0/guides/Guide_05_Lifespan_Handler/index.html new file mode 100644 index 0000000..9f5f5f5 --- /dev/null +++ b/docs/0.6.0/guides/Guide_05_Lifespan_Handler/index.html @@ -0,0 +1,75 @@ + + + + + +Lifespan Events | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Lifespan Events

Did you know that you can define some special code that runs before and +after your Kafka application? This code will be executed just once, but +it covers the whole lifespan of your app! 🚀

Let’s break it down:

You can define logic (code) that should be executed before the +application starts up. This is like a warm-up for your app, getting it +ready to consume and produce messages.

Similarly, you can define logic (code) that should be executed when the +application is shutting down. This is like a cool-down for your app, +making sure everything is properly closed and cleaned up.

By executing code before consuming and after producing, you cover the +entire lifecycle of your application 🎉

This is super handy for setting up shared resources that are needed +across consumers and producers, like a database connection pool or a +machine learning model. And the best part? You can clean up these +resources when the app is shutting down!

So let’s give it a try and see how it can make your Kafka app even more awesome! 💪

Lifespan example - Iris prediction model

Let’s dive into an example to see how you can leverage the lifecycle +handler to solve a common use case. Imagine that you have some machine +learning models that need to consume incoming messages and produce +response/prediction messages. These models are shared among consumers +and producers, which means you don’t want to load them for every +message.

Here’s where the lifecycle handler comes to the rescue! By loading the +model before the messages are consumed and produced, but only right +before the application starts receiving messages, you can ensure that +the model is ready to use without compromising the performance of your +tests. In the upcoming sections, we’ll walk you through how to +initialize an Iris species prediction model and use it in your developed +application.

Lifespan

You can define this startup and shutdown logic using the lifespan +parameter of the FastKafka app, and an async context manager.

Let’s start with an example and then see it in detail.

We create an async function lifespan() with yield like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from fastkafka import FastKafka

ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

The first thing to notice, is that we are defining an async function +with yield. This is very similar to Dependencies with yield.

The first part of the function, before the yield, will be executed +before the application starts. And the part after the yield will +be executed after the application has finished.

This lifespan will create an iris_prediction model on application +startup and cleanup the references after the app is shutdown.

The lifespan will be passed an KafkaApp reference on startup of your +application, which you can use to reference your application on startup.

For demonstration sake, we also added prints so that when running the +app we can see that our lifespan was called.

Async context manager

Context managers can be used in with blocks, our lifespan, for example +could be used like this:

ml_models = {}
async with lifespan(None):
print(ml_models)

When you create a context manager or an async context manager, what it +does is that, before entering the with block, it will execute the code +before the yield, and after exiting the with block, it will execute +the code after the yield.

If you want to learn more about context managers and contextlib +decorators, please visit Python official +docs

App demo

FastKafka app

Let’s now create our application using the created lifespan handler.

Notice how we passed our lifespan handler to the app when constructing it through the lifespan argument.

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Data modeling

Let’s model the Iris data for our app:

from pydantic import BaseModel, Field, NonNegativeFloat

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Consumers and producers

Let’s create a consumer and producer for our app that will generate predictions from input iris data.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Final app

The final app looks like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")
ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Running the app

Now we can run the app with your custom lifespan handler. Copy the code +above in lifespan_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app

When you run the app, you should see a similar output to the one below:

[262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[262292]: Loading the model!
[262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished
[262292]: Exiting, clearing model dict!
[INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.

Recap

In this guide we have defined a lifespan handler and passed to our +FastKafka app.

Some important points are:

  1. Lifespan handler is implemented as +AsyncContextManager
  2. Code before yield in lifespan will be executed before +application startup
  3. Code after yield in lifespan will be executed after +application shutdown
  4. You can pass your lifespan handler to FastKafka app on +initialisation by passing a lifespan argument
+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka/index.html b/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka/index.html new file mode 100644 index 0000000..766c47c --- /dev/null +++ b/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka/index.html @@ -0,0 +1,80 @@ + + + + + +Benchmarking FastKafka app | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Benchmarking FastKafka app

Prerequisites

To benchmark a +FastKafka +project, you will need the following:

  1. A library built with +FastKafka.
  2. A running Kafka instance to benchmark the FastKafka application +against.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

FastKafka +has a decorator for benchmarking which is appropriately called as +benchmark. Let’s edit our application.py file and add the +benchmark decorator to the consumes method.

# content of the "application.py" file with benchmark

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
@kafka_app.benchmark(interval=1, sliding_window_size=5)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Here we are conducting a benchmark of a function that consumes data from +the input_data topic with an interval of 1 second and a sliding window +size of 5.

This benchmark method uses the interval parameter to calculate the +results over a specific time period, and the sliding_window_size +parameter to determine the maximum number of results to use in +calculating the average throughput and standard deviation.

This benchmark is important to ensure that the function is performing +optimally and to identify any areas for improvement.

Starting Kafka

If you already have a Kafka running somewhere, then you can skip this +step.

Please keep in mind that your benchmarking results may be affected by +bottlenecks such as network, CPU cores in the Kafka machine, or even the +Kafka configuration itself.

Installing Java and Kafka

We need a working Kafka instance to benchmark our FastKafka app, and to run Kafka we need Java. Thankfully, FastKafka comes with a CLI to install both Java and Kafka on our machine.

So, let’s install Java and Kafka by executing the following command.

fastkafka testing install_deps

The above command will extract Kafka scripts at the location +“\$HOME/.local/kafka_2.13-3.3.2" on your machine.

Creating configuration for Zookeeper and Kafka

Now we need to start Zookeeper and Kafka separately, and to start +them we need zookeeper.properties and kafka.properties files.

Let’s create a folder inside the folder where Kafka scripts were +extracted and change directory into it.

mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir

Let’s create a file called zookeeper.properties and write the +following content to the file:

dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper
clientPort=2181
maxClientCnxns=0

Similarly, let’s create a file called kafka.properties and write the +following content to the file:

broker.id=0
listeners=PLAINTEXT://:9092

num.network.threads=3
num.io.threads=8
socket.send.buffer.bytes=102400
socket.receive.buffer.bytes=102400
socket.request.max.bytes=104857600

num.partitions=1
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1

log.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs
log.flush.interval.messages=10000
log.flush.interval.ms=1000
log.retention.hours=168
log.retention.bytes=1073741824
log.segment.bytes=1073741824
log.retention.check.interval.ms=300000

zookeeper.connect=localhost:2181
zookeeper.connection.timeout.ms=18000

Starting Zookeeper and Kafka

We need two different terminals to run Zookeeper in one and Kafka in +another. Let’s open a new terminal and run the following commands to +start Zookeeper:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./zookeeper-server-start.sh ../data_dir/zookeeper.properties

Once Zookeeper is up and running, open a new terminal and execute the following commands to start Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-server-start.sh ../data_dir/kafka.properties

Now we have both Zookeeper and Kafka up and running.

Creating topics in Kafka

In a new terminal, please execute the following command to create +necessary topics in Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092
./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092

Populating topics with dummy data

To benchmark our +FastKafka +app, we need some data in Kafka topics.

In the same terminal, let’s create some dummy data:

yes '{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}' | head -n 1000000 > /tmp/test_data

This command will create a file called test_data in the tmp folder +with one million rows of text. This will act as dummy data to populate +the input_data topic.

Let’s populate the created topic input_data with the dummy data which +we created above:

./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data

Now our topic input_data has one million records/messages in it. If +you want more messages in topic, you can simply execute the above +command again and again.

Benchmarking FastKafka

Once Zookeeper and Kafka are ready, benchmarking +FastKafka +app is as simple as running the fastkafka run command:

fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app

This command will start the +FastKafka +app and begin consuming messages from Kafka, which we spun up earlier. +Additionally, the same command will output all of the benchmark +throughputs based on the interval and sliding_window_size values.

The output for the fastkafka run command is:

[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh
ost:9092', 'max_poll_records': 100}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition
=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)

Based on the output, when using 1 worker, our +FastKafka +app achieved a throughput of 93k messages per second and an +average throughput of 93k messages per second.

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html b/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html new file mode 100644 index 0000000..c74cef4 --- /dev/null +++ b/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html @@ -0,0 +1,150 @@ + + + + + +Encoding and Decoding Kafka Messages with FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Encoding and Decoding Kafka Messages with FastKafka

Prerequisites

  1. A basic knowledge of +FastKafka +is needed to proceed with this guide. If you are not familiar with +FastKafka, +please go through the tutorial first.
  2. FastKafka +with its dependencies installed is needed. Please install +FastKafka +using the command - pip install fastkafka

Ways to Encode and Decode Messages with FastKafka

In Python, by default, we send Kafka messages as bytes. Even if our message is a string, we convert it to bytes and then send it to the Kafka topic. Similarly, while consuming messages, we consume them as bytes and then convert them to strings.

In FastKafka, we specify message schema using Pydantic models as +mentioned in tutorial:

# Define Pydantic models for Kafka messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
    # Each measurement is a non-negative float (cm); `example` feeds the generated docs.
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    # Predicted iris species name, e.g. "setosa".
    species: str = Field(..., example="setosa", description="Predicted species")

Then, we send and receive messages as instances of Pydantic models which +we defined. So, FastKafka needs a way to encode/decode to these Pydantic +model messages to bytes in order to send/receive messages to/from Kafka +topics.

The @consumes and @produces methods of FastKafka accept a parameter +called decoder/encoder to decode/encode Kafka messages. FastKafka +provides three ways to encode and decode messages:

  1. json - This is the default encoder/decoder option in FastKafka. +While producing, this option converts our instance of Pydantic model +messages to a JSON string and then converts it to bytes before +sending it to the topic. While consuming, it converts bytes to a +JSON string and then constructs an instance of Pydantic model from +the JSON string.
  2. avro - This option uses Avro encoding/decoding to convert instances +of Pydantic model messages to bytes while producing, and while +consuming, it constructs an instance of Pydantic model from bytes.
  3. custom encoder/decoder - If you are not happy with the json or avro +encoder/decoder options, you can write your own encoder/decoder +functions and use them to encode/decode Pydantic messages.

1. Json encoder and decoder

The default option in FastKafka is json encoder/decoder. This option, +while producing, converts our instance of pydantic model messages to +json string and then converts to bytes before sending it to the topics. +While consuming it converts bytes to json string and then constructs +instance of pydantic model from json string.

We can use the application from tutorial as +is, and it will use the json encoder/decoder by default. But, for +clarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder +parameter:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

# Registry for models loaded at startup; populated/cleared by `lifespan`.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
    # Load the ML model
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
        X, y
    )
    yield
    # Clean up the ML models and release the resources
    ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)

# Explicit 'json' decoder (also the default): raw bytes -> JSON -> IrisInputData.
@kafka_app.consumes(topic="input_data", decoder="json")
async def on_input_data(msg: IrisInputData):
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    await to_predictions(species_class)


# Explicit 'json' encoder (also the default): IrisPrediction -> JSON -> raw bytes.
@kafka_app.produces(topic="predictions", encoder="json")
async def to_predictions(species_class: int) -> IrisPrediction:
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

In the above code, the @kafka_app.consumes decorator sets up a +consumer for the “input_data" topic, using the ‘json’ decoder to convert +the message payload to an instance of IrisInputData. The +@kafka_app.produces decorator sets up a producer for the “predictions" +topic, using the ‘json’ encoder to convert the instance of +IrisPrediction to message payload.

2. Avro encoder and decoder

What is Avro?

Avro is a row-oriented remote procedure call and data serialization +framework developed within Apache’s Hadoop project. It uses JSON for +defining data types and protocols, and serializes data in a compact +binary format. To learn more about the Apache Avro, please check out the +docs.

Installing FastKafka with Avro dependencies

FastKafka +with dependencies for Apache Avro installed is needed to use avro +encoder/decoder. Please install +FastKafka +with Avro support using the command - pip install fastkafka[avro]

Defining Avro Schema Using Pydantic Models

By default, you can use Pydantic model to define your message schemas. +FastKafka internally takes care of encoding and decoding avro messages, +based on the Pydantic models.

So, similar to the tutorial, the message schema will +remain as it is.

# Define Pydantic models for Avro messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
    # Same model as in the JSON example — the Avro codec reuses it unchanged.
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    species: str = Field(..., example="setosa", description="Predicted species")

No need to change anything to support avro. You can use existing +Pydantic models as is.

Reusing existing avro schema

If you are using some other library to send and receive avro encoded +messages, it is highly likely that you already have an Avro schema +defined.

Building pydantic models from avro schema dictionary

Let’s modify the above example and let’s assume we have schemas already +for IrisInputData and IrisPrediction which will look like below:

# Avro record schema for the incoming iris measurements: four double fields,
# one per measurement, each carrying its documentation string.
iris_input_data_schema = {
    "type": "record",
    "namespace": "IrisInputData",
    "name": "IrisInputData",
    "fields": [
        {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
        {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
        {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
        {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
    ],
}
# Avro record schema for the outgoing prediction: a single string field.
iris_prediction_schema = {
    "type": "record",
    "namespace": "IrisPrediction",
    "name": "IrisPrediction",
    "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}

We can easily construct pydantic models from avro schema using +avsc_to_pydantic +function which is included as part of +FastKafka +itself.

from fastkafka.encoder import avsc_to_pydantic

# Build the Pydantic model classes directly from the Avro schema dictionaries
# and print their generated fields for inspection.
IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.__fields__)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.__fields__)

The above code will convert avro schema to pydantic models and will +print pydantic models’ fields. The output of the above is:

{'sepal_length': ModelField(name='sepal_length', type=float, required=True),
'sepal_width': ModelField(name='sepal_width', type=float, required=True),
'petal_length': ModelField(name='petal_length', type=float, required=True),
'petal_width': ModelField(name='petal_width', type=float, required=True)}

{'species': ModelField(name='species', type=str, required=True)}

This is exactly same as manually defining the pydantic models ourselves. +You don’t have to worry about not making any mistakes while converting +avro schema to pydantic models manually. You can easily and +automatically accomplish it by using +avsc_to_pydantic +function as demonstrated above.

Building pydantic models from .avsc file

Not all cases will have avro schema conveniently defined as a python +dictionary. You may have it stored as the proprietary .avsc files in +filesystem. Let’s see how to convert those .avsc files to pydantic +models.

Let’s assume our avro files are stored in files called +iris_input_data_schema.avsc and iris_prediction_schema.avsc. In that +case, following code converts the schema to pydantic models:

import json
from fastkafka.encoder import avsc_to_pydantic


# The .avsc files are plain JSON documents, so json.load parses them directly.
with open("iris_input_data_schema.avsc", "rb") as f:
    iris_input_data_schema = json.load(f)

with open("iris_prediction_schema.avsc", "rb") as f:
    iris_prediction_schema = json.load(f)


# Convert the loaded schemas into Pydantic model classes.
IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.__fields__)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.__fields__)

Consume/Produce avro messages with FastKafka

FastKafka +provides @consumes and @produces methods to consume/produces +messages to/from a Kafka topic. This is explained in +tutorial.

The @consumes and @produces methods accepts a parameter called +decoder/encoder to decode/encode avro messages.

# NOTE: @consumes takes a *decoder* (bytes -> model) and @produces takes an
# *encoder* (model -> bytes); the two parameters were previously swapped here
# (cf. the json example and the assembled avro app, which use them correctly).
@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

In the above example, in @consumes and @produces methods, we +explicitly instruct FastKafka to decode and encode messages using +the avro decoder/encoder instead of the default json +decoder/encoder.

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use avro to decode and +encode messages:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

# Registry for models loaded at startup; populated/cleared by `lifespan`.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
    # Load the ML model
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
        X, y
    )
    yield
    # Clean up the ML models and release the resources
    ml_models.clear()


# Avro record schema for the incoming iris measurements.
iris_input_data_schema = {
    "type": "record",
    "namespace": "IrisInputData",
    "name": "IrisInputData",
    "fields": [
        {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
        {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
        {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
        {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
    ],
}
# Avro record schema for the outgoing prediction.
iris_prediction_schema = {
    "type": "record",
    "namespace": "IrisPrediction",
    "name": "IrisPrediction",
    "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}
# Or load schema from avsc files

from fastkafka.encoder import avsc_to_pydantic

# Generate the Pydantic message models from the Avro schemas.
IrisInputData = avsc_to_pydantic(iris_input_data_schema)
IrisPrediction = avsc_to_pydantic(iris_prediction_schema)


from fastkafka import FastKafka

kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)

# 'avro' decoder: raw Kafka bytes -> IrisInputData instance.
@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    await to_predictions(species_class)


# 'avro' encoder: IrisPrediction instance -> raw Kafka bytes.
@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

The above code is a sample implementation of using FastKafka to consume +and produce Avro-encoded messages from/to a Kafka topic. The code +defines two Avro schemas for the input data and the prediction result. +It then uses the +avsc_to_pydantic +function from the FastKafka library to convert the Avro schema into +Pydantic models, which will be used to decode and encode Avro messages.

The +FastKafka +class is then instantiated with the broker details, and two functions +decorated with @kafka_app.consumes and @kafka_app.produces are +defined to consume messages from the “input_data" topic and produce +messages to the “predictions" topic, respectively. The functions uses +the decoder=“avro" and encoder=“avro" parameters to decode and encode +the Avro messages.

In summary, the above code demonstrates a straightforward way to use +Avro-encoded messages with FastKafka to build a message processing +pipeline.

3. Custom encoder and decoder

If you are not happy with the json or avro encoder/decoder options, you +can write your own encoder/decoder functions and use them to +encode/decode Pydantic messages.

Writing a custom encoder and decoder

In this section, let’s see how to write a custom encoder and decoder +which obfuscates kafka message with simple +ROT13 cipher.

import codecs
import json
from typing import Any, Type

# BaseModel was previously used without being imported (NameError as written);
# pydantic.main.ModelMetaclass is an internal symbol removed in Pydantic v2 —
# Type[BaseModel] is the public, forward-compatible annotation.
from pydantic import BaseModel


def custom_encoder(msg: BaseModel) -> bytes:
    # Model -> JSON string -> ROT13-obfuscated string -> UTF-8 bytes.
    msg_str = msg.json()
    obfuscated = codecs.encode(msg_str, 'rot13')
    raw_bytes = obfuscated.encode("utf-8")
    return raw_bytes

def custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:
    # Inverse of custom_encoder: bytes -> de-obfuscated JSON -> `cls` instance.
    obfuscated = raw_msg.decode("utf-8")
    msg_str = codecs.decode(obfuscated, 'rot13')
    msg_dict = json.loads(msg_str)
    return cls(**msg_dict)

The above code defines two custom functions for encoding and decoding +messages in a Kafka application using the FastKafka library.

The encoding function, custom_encoder(), takes a message msg which +is an instance of a Pydantic model, converts it to a JSON string using +the json() method, obfuscates the resulting string using the ROT13 +algorithm from the codecs module, and finally encodes the obfuscated +string as raw bytes using the UTF-8 encoding.

The decoding function, custom_decoder(), takes a raw message raw_msg +in bytes format, a Pydantic class to construct instance with cls +parameter. It first decodes the raw message from UTF-8 encoding, then +uses the ROT13 algorithm to de-obfuscate the string. Finally, it loads +the resulting JSON string using the json.loads() method and returns a +new instance of the specified cls class initialized with the decoded +dictionary.

These functions can be used with FastKafka’s encoder and decoder +parameters to customize the serialization and deserialization of +messages in Kafka topics.

Let’s test the above code

# Round-trip check: encode an IrisInputData instance, then decode it back.
i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

encoded = custom_encoder(i)
display(encoded)  # notebook helper; shows the ROT13-obfuscated JSON bytes

decoded = custom_decoder(encoded, IrisInputData)
display(decoded)  # should equal the original instance `i`

This will result in following output

b'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}'

IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use our custom decoder and +encoder functions:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

# Registry for models loaded at startup; populated/cleared by `lifespan`.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
    # Load the ML model
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
        X, y
    )
    yield
    # Clean up the ML models and release the resources
    ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    species: str = Field(..., example="setosa", description="Predicted species")


import codecs
import json
from typing import Any, Type


def custom_encoder(msg: BaseModel) -> bytes:
    # Model -> JSON string -> ROT13-obfuscated string -> UTF-8 bytes.
    msg_str = msg.json()
    obfuscated = codecs.encode(msg_str, 'rot13')
    raw_bytes = obfuscated.encode("utf-8")
    return raw_bytes

# Type[BaseModel] replaces pydantic.main.ModelMetaclass, an internal symbol
# removed in Pydantic v2.
def custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:
    # Inverse of custom_encoder: bytes -> de-obfuscated JSON -> `cls` instance.
    obfuscated = raw_msg.decode("utf-8")
    msg_str = codecs.decode(obfuscated, 'rot13')
    msg_dict = json.loads(msg_str)
    return cls(**msg_dict)


from fastkafka import FastKafka

kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)

# Pass the custom callables (instead of "json"/"avro") to take over
# deserialization and serialization of the Kafka payloads.
@kafka_app.consumes(topic="input_data", decoder=custom_decoder)
async def on_input_data(msg: IrisInputData):
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder=custom_encoder)
async def to_predictions(species_class: int) -> IrisPrediction:
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

This code defines a custom encoder and decoder functions for encoding +and decoding messages sent through a Kafka messaging system.

The custom encoder function takes a message represented as a +BaseModel and encodes it as bytes by first converting it to a JSON +string and then obfuscating it using the ROT13 encoding. The obfuscated +message is then converted to bytes using UTF-8 encoding and returned.

The custom decoder function takes in the bytes representing an +obfuscated message, decodes it using UTF-8 encoding, then decodes the +ROT13 obfuscation, and finally loads it as a dictionary using the json +module. This dictionary is then converted to a BaseModel instance +using the cls parameter.

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_11_Consumes_Basics/index.html b/docs/0.6.0/guides/Guide_11_Consumes_Basics/index.html new file mode 100644 index 0000000..69c4ad8 --- /dev/null +++ b/docs/0.6.0/guides/Guide_11_Consumes_Basics/index.html @@ -0,0 +1,88 @@ + + + + + +@consumes basics | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

@consumes basics

You can use @consumes decorator to consume messages from Kafka topics.

In this guide we will create a simple FastKafka app that will consume +HelloWorld messages from hello_world topic.

Import FastKafka

To use the @consumes decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

In this demo we will log the messages to the output so that we can +inspect and verify that our app is consuming properly. For that we need +to import the logger.

from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

Define the structure of the messages

Next, you need to define the structure of the messages you want to +consume from the topic using pydantic. For +the guide we’ll stick to something basic, but you are free to define any +complex message structure you wish in your project, just make sure it +can be JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
    # Single demo payload field; `example` is surfaced in the generated spec.
    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



# Replace the placeholders with your Kafka bootstrap server's address/port.
kafka_brokers = {
    "demo_broker": {
        "url": "<url_of_your_kafka_bootstrap_server>",
        "description": "local demo kafka broker",
        "port": "<port_of_your_kafka_bootstrap_server>",
    }
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a consumer function and decorate it with @consumes

Let’s create a consumer function that will consume HelloWorld messages +from hello_world topic and log them.

# Topic defaults to the function name with the "on_" prefix stripped: "hello_world".
@app.consumes()
async def on_hello_world(msg: HelloWorld):
    logger.info(f"Got msg: {msg}")

The function decorated with the @consumes decorator will be called +when a message is produced to Kafka.

The message will then be injected into the typed msg argument of the +function and its type will be used to parse the message.

In this example case, when the message is sent into a hello_world +topic, it will be parsed into a HelloWorld class and on_hello_world +function will be called with the parsed class as msg argument value.

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class HelloWorld(BaseModel):
    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )


kafka_brokers = {
    "demo_broker": {
        "url": "<url_of_your_kafka_bootstrap_server>",
        "description": "local demo kafka broker",
        "port": "<port_of_your_kafka_bootstrap_server>",
    }
}

app = FastKafka(kafka_brokers=kafka_brokers)

# Consumes from the "hello_world" topic (function name minus the "on_" prefix).
@app.consumes()
async def on_hello_world(msg: HelloWorld):
    logger.info(f"Got msg: {msg}")

Run the app

Now we can run the app. Copy the code above in consumer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

After running the command, you should see this output in your terminal:

[513863]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[513863]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[513863]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[513863]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 513863...
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 513863 terminated.

Send the message to kafka topic

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo {\"msg\": \"Hello world\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the “Got msg: msg='Hello world'" being logged by your +consumer.

Choosing a topic

You probably noticed that you didn’t define which topic you are +receiving the message from, this is because the @consumes decorator +determines the topic by default from your function name. The decorator +will take your function name and strip the default “on_" prefix from it +and use the rest as the topic name. In this example case, the topic is +hello_world.

You can choose your custom prefix by defining the prefix parameter in +consumes decorator, like this:

Also, you can define the topic name completely by defining the topic +in parameter in consumes decorator, like this:

Message data

The message received from kafka is translated from binary JSON +representation int the class defined by typing of msg parameter in the +function decorated by the @consumes decorator.

In this example case, the message will be parsed into a HelloWorld +class.

Message metadata

If you need any of Kafka message metadata such as timestamp, partition +or headers you can access the metadata by adding a EventMetadata typed +argument to your consumes function and the metadata from the incoming +message will be automatically injected when calling the consumes +function.

Let’s demonstrate that.

Create a consumer function with metadata

The only difference from the original basic consume function is that we +are now passing the meta: EventMetadata argument to the function. The +@consumes decorator will register that and, when a message is +consumed, it will also pass the metadata to your function. Now you can +use the metadata in your consume function. Lets log it to see what it +contains.

First, we need to import the EventMetadata

Now we can add the meta argument to our consuming function.

Your final app should look like this:

Now lets run the app and send a message to the broker to see the logged +message metadata.

You should see a similar log as the one below and the metadata being +logged in your app.

As you can see in the log, from the metadata you now have the +information about the partition, offset, timestamp, key and headers. +🎉

Dealing with high latency consuming functions

If your functions have high latency due to, for example, lengthy database calls you will notice a big decrease in performance. This is due to the issue of how the consumes decorator executes your consume functions when consuming events. By default, the consume function will run the consuming functions for one topic sequentially, this is the most straightforward approach and results with the least amount of overhead.

But, to handle those high latency tasks and run them in parallel, +FastKafka has a +DynamicTaskExecutor +prepared for your consumers. This executor comes with additional +overhead, so use it only when you need to handle high latency functions.

Lets demonstrate how to use it.

@app.consumes(executor="DynamicTaskExecutor")
async def on_hello_world(msg: HelloWorld):
    logger.info(f"Got msg: {msg}")

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo {\"msg\": \"Hello world\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>
[6814]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:50361'
[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:50361', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[6814]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[6814]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[6814]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
[6814]: ConsumerRecord(topic='hello_world', partition=0, offset=0, timestamp=1683803949271, timestamp_type=0, key=None, value=b'{"msg": "Hello world"}', checksum=None, serialized_key_size=-1, serialized_value_size=22, headers=())
[6814]: [INFO] consumer_example: Got msg: msg='Hello world'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 6814...
[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[6814]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 6814 terminated.

You should see the “Got msg: msg='Hello world'" being logged by your +consumer.

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_21_Produces_Basics/index.html b/docs/0.6.0/guides/Guide_21_Produces_Basics/index.html new file mode 100644 index 0000000..5c27257 --- /dev/null +++ b/docs/0.6.0/guides/Guide_21_Produces_Basics/index.html @@ -0,0 +1,62 @@ + + + + + +@produces basics | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

@produces basics

You can use @produces decorator to produce messages to Kafka topics.

In this guide we will create a simple FastKafka app that will produce +hello world messages to hello_world topic.

Import FastKafka

To use the @produces decorator, first we need to import the base FastKafka app to create our application.

from fastkafka import FastKafka

Define the structure of the messages

Next, you need to define the structure of the messages you want to send +to the topic using pydantic. For the guide +we’ll stick to something basic, but you are free to define any complex +message structure you wish in your project, just make sure it can be +JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
    # Single demo payload field; `example` is surfaced in the generated spec.
    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



# Replace the placeholders with your Kafka bootstrap server's address/port.
kafka_brokers = {
    "demo_broker": {
        "url": "<url_of_your_kafka_bootstrap_server>",
        "description": "local demo kafka broker",
        "port": "<port_of_your_kafka_bootstrap_server>",
    }
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a producer function and decorate it with @produces

Let’s create a producer function that will produce HelloWorld messages +to hello_world topic:


# Topic defaults to the function name with the "to_" prefix stripped: "hello_world".
# The returned HelloWorld instance is also sent to the topic as a side effect.
@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
    return HelloWorld(msg=msg)

Now you can call your defined function as any normal python function in +your code. The side effect of calling the function will be that the +value you are returning will also be sent to a kafka topic.

By default, the topic is determined from your function name, the “to_" +prefix is stripped and what is left over is used as a topic name. In this +case, that is hello_world.

Instruct the app to start sending HelloWorld messages

Let’s use @run_in_background decorator to instruct our app to send +HelloWorld messages to hello_world topic every second.


import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)


kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

script_file = "producer_example.py"
cmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"
md(
f"Now we can run the app. Copy the code above in producer_example.py and run it by running\n```shell\n{cmd}\n```"
)

Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.

Check if the message was sent to the Kafka topic

Lets check the topic and see if there is a “Hello world!" message in the +hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the {“msg": “Hello world!"} messages in your topic.

Choosing a topic

You probably noticed that you didn’t define which topic you are sending +the message to, this is because the @produces decorator determines the +topic by default from your function name. The decorator will take your +function name and strip the default “to_" prefix from it and use the +rest as the topic name. In this example case, the topic is +hello_world.

!!! warn "New topics"

Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.

You can choose your custom prefix by defining the prefix parameter in +produces decorator, like this:


@app.produces(prefix="send_to_")
async def send_to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Also, you can define the topic name completely by defining the topic +parameter in the produces decorator, like this:


@app.produces(topic="my_special_topic")
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Message data

The return value from your function will be translated to a JSON string and +then to bytes and sent to the defined Kafka topic. The typing of the return +value is used for generating the documentation for your Kafka app.

In this example case, the return value is the HelloWorld class which will be +translated into a JSON formatted string and then to bytes. The translated +data will then be sent to Kafka in the form of: +b'{"msg": "Hello world!"}'

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_22_Partition_Keys/index.html b/docs/0.6.0/guides/Guide_22_Partition_Keys/index.html new file mode 100644 index 0000000..5ff72d9 --- /dev/null +++ b/docs/0.6.0/guides/Guide_22_Partition_Keys/index.html @@ -0,0 +1,55 @@ + + + + + +Defining a partition key | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Defining a partition key

Partition keys are used in Apache Kafka to determine which partition a +message should be written to. This ensures that related messages are +kept together in the same partition, which can be useful for ensuring +order or for grouping related messages together for efficient +processing. Additionally, partitioning data across multiple partitions +allows Kafka to distribute load across multiple brokers and scale +horizontally, while replicating data across multiple brokers provides +fault tolerance.

You can define your partition keys when using the @produces decorator, +this guide will demonstrate to you this feature.

Return a key from the producing function

To define a key for the message that you want to produce to Kafka topic, +you need to wrap the response into +KafkaEvent +class and set the key value. Check the example below:


from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

In the example, we want to return the HelloWorld message class with +the key defined as my_key. So, we wrap the message and key into a +KafkaEvent class and return it as such.

While generating the documentation, the +KafkaEvent +class will be unwrapped and the HelloWorld class will be documented in +the definition of message type, same way if you didn’t use the key.

!!! info "Which key to choose?"

Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.

App example

We will modify the app example from @producer basics guide to return +the HelloWorld with our key. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

Now we can run the app. Copy the code above in producer_with_key_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app

After running the command, you should see this output in your terminal:

[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.

Check if the message was sent to the Kafka topic with the desired key

Lets check the topic and see if there is a “Hello world!" message in the +hello_world topic with the defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the my_key {“msg": “Hello world!"} messages in your +topic appearing, the my_key part of the message is the key that we +defined in our producing function.

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_23_Batch_Producing/index.html b/docs/0.6.0/guides/Guide_23_Batch_Producing/index.html new file mode 100644 index 0000000..80086df --- /dev/null +++ b/docs/0.6.0/guides/Guide_23_Batch_Producing/index.html @@ -0,0 +1,55 @@ + + + + + +Batch producing | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Batch producing

If you want to send your data in batches @produces decorator makes +that possible for you. By returning a list of messages you want to +send in a batch the producer will collect the messages and send them in +a batch to a Kafka broker.

This guide will demonstrate how to use this feature.

Return a batch from the producing function

To define a batch that you want to produce to Kafka topic, you need to +return the List of the messages that you want to be batched from your +producing function.


from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

In the example, we want to return the HelloWorld message class batch +that is created from a list of msgs we passed into our producing +function.

Let's also prepare a background task that will send a batch of “hello +world" messages when the app starts.


@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

App example

We will modify the app example from @producer +basics guide to return the +HelloWorld batch. The final app will look like this (make sure you +replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

Run the app

Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task
[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.

Check if the batch was sent to the Kafka topic with the defined key

Lets check the topic and see if there are “Hello world" messages in the +hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages in your topic.

Batch key

To define a key for your batch like in Defining a partition +key guide you can wrap the +returning value in a +KafkaEvent +class. To learn more about defining a partition key and the +KafkaEvent +class, please, have a look at Defining a partition +key guide.

Let’s demonstrate that.

To define a key, we just need to modify our producing function, like +this:


from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Now our app looks like this:


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Check if the batch was sent to the Kafka topic

Lets check the topic and see if there are “Hello world" messages in the +hello_world topic, containing a defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages with the defined key in your topic.

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html b/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html new file mode 100644 index 0000000..62d61b0 --- /dev/null +++ b/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html @@ -0,0 +1,73 @@ + + + + + +Deploying FastKafka using Docker | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Deploying FastKafka using Docker

Building a Docker Image

To build a Docker image for a FastKafka project, we need the following +items:

  1. A library that is built using FastKafka.
  2. A file in which the requirements are specified. This could be a +requirements.txt file, a setup.py file, or even a wheel file.
  3. A Dockerfile to build an image that will include the two files +mentioned above.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Creating requirements.txt file

The above code only requires fastkafka. So, we will add only +fastkafka to the requirements.txt file, but you can add additional +requirements to it as well.

fastkafka>=0.3.0

Here we are using requirements.txt to store the project’s +dependencies. However, other methods like setup.py, pipenv, and +wheel files can also be used. setup.py is commonly used for +packaging and distributing Python modules, while pipenv is a tool used +for managing virtual environments and package dependencies. wheel +files are built distributions of Python packages that can be installed +with pip.

Creating Dockerfile

# (1)
FROM python:3.9-slim-bullseye
# (2)
WORKDIR /project
# (3)
COPY application.py requirements.txt /project/
# (4)
RUN pip install --no-cache-dir --upgrade -r /project/requirements.txt
# (5)
CMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]
  1. Start from the official Python base image.

  2. Set the current working directory to /project.

    This is where we’ll put the requirements.txt file and the +application.py file.

  3. Copy the application.py file and requirements.txt file inside +the /project directory.

  4. Install the package dependencies in the requirements file.

    The --no-cache-dir option tells pip to not save the downloaded +packages locally, as that is only if pip was going to be run again +to install the same packages, but that’s not the case when working +with containers.

    The --upgrade option tells pip to upgrade the packages if they +are already installed.

  5. Set the command to run the fastkafka run command.

    CMD takes a list of strings, each of these strings is what you +would type in the command line separated by spaces.

    This command will be run from the current working directory, the +same /project directory you set above with WORKDIR /project.

    We supply additional parameters --num-workers and --kafka-broker +for the run command. Finally, we specify the location of our +fastkafka application location as a command argument.

    To learn more about fastkafka run command please check the CLI +docs.

Build the Docker Image

Now that all the files are in place, let’s build the container image.

  1. Go to the project directory (where your Dockerfile is, containing +your application.py file).

  2. Run the following command to build the image:

    docker build -t fastkafka_project_image .

    This command will create a docker image with the name +fastkafka_project_image and the latest tag.

That’s it! You have now built a docker image for your FastKafka project.

Start the Docker Container

Run a container based on the built image:

docker run -d --name fastkafka_project_container fastkafka_project_image

Additional Security

Trivy is an open-source tool that scans Docker images for +vulnerabilities. It can be integrated into your CI/CD pipeline to ensure +that your images are secure and free from known vulnerabilities. Here’s +how you can use trivy to scan your fastkafka_project_image:

  1. Install trivy on your local machine by following the instructions +provided in the official trivy +documentation.

  2. Run the following command to scan your fastkafka_project_image:

    trivy image fastkafka_project_image

    This command will scan your fastkafka_project_image for any +vulnerabilities and provide you with a report of its findings.

  3. Fix any vulnerabilities identified by trivy. You can do this by +updating the vulnerable package to a more secure version or by using +a different package altogether.

  4. Rebuild your fastkafka_project_image and repeat steps 2 and 3 +until trivy reports no vulnerabilities.

By using trivy to scan your Docker images, you can ensure that your +containers are secure and free from known vulnerabilities.

Example repo

A +FastKafka +based library which uses above mentioned Dockerfile to build a docker +image can be found +here

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html b/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html new file mode 100644 index 0000000..0f974e2 --- /dev/null +++ b/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html @@ -0,0 +1,143 @@ + + + + + +Using Redpanda to test FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

Using Redpanda to test FastKafka

What is FastKafka?

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

What is Redpanda?

Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools +work out of the box with Redpanda.

From redpanda.com:

Redpanda is a Kafka®-compatible streaming data platform that is proven +to be 10x faster and 6x lower in total costs. It is also JVM-free, +ZooKeeper®-free, Jepsen-tested and source available.

Some of the advantages of Redpanda over Kafka are

  1. A single binary with built-in everything, no ZooKeeper® or JVM +needed.
  2. Costs up to 6X less than Kafka.
  3. Up to 10x lower average latencies and up to 6x faster Kafka +transactions without compromising correctness.

To learn more about Redpanda, please visit their +website or checkout this blog +post +comparing Redpanda and Kafka’s performance benchmarks.

Example repo

A sample fastkafka-based library that uses Redpanda for testing, based +on this guide, can be found +here.

The process

Here are the steps we’ll be walking through to build our example:

  1. Set up the prerequisites.
  2. Clone the example repo.
  3. Explain how to write an application using FastKafka.
  4. Explain how to write a test case to test FastKafka with Redpanda.
  5. Run the test case and produce/consume messages.

1. Prerequisites

Before starting, make sure you have the following prerequisites set up:

  1. Python 3.x: A Python 3.x installation is required to run +FastKafka. You can download the latest version of Python from the +official website. You’ll also +need to have pip installed and updated, which is Python’s package +installer.
  2. Docker Desktop: Docker is used to run Redpanda, which is +required for testing FastKafka. You can download and install Docker +Desktop from the official +website.
  3. Git: You’ll need to have Git installed to clone the example +repo. You can download Git from the official +website.

2. Cloning and setting up the example repo

To get started with the example code, clone the GitHub +repository by +running the following command in your terminal:

git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git
cd sample_fastkafka_with_redpanda

This will create a new directory called sample_fastkafka_with_redpanda +and download all the necessary files.

Create a virtual environment

Before writing any code, let’s create a new virtual +environment +for our project.

A virtual environment is an isolated environment for a Python project, +which allows you to manage project-specific dependencies and avoid +conflicts between different projects.

To create a new virtual environment, run the following commands in your +terminal:

python3 -m venv venv

This will create a new directory called venv in your project +directory, which will contain the virtual environment.

To activate the virtual environment, run the following command:

source venv/bin/activate

This will change your shell’s prompt to indicate that you are now +working inside the virtual environment.

Finally, run the following command to upgrade pip, the Python package +installer:

pip install --upgrade pip

Install Python dependencies

Next, let’s install the required Python dependencies. In this guide, +we’ll be using +FastKafka +to write our application code and pytest and pytest-asyncio to test +it.

You can install the dependencies from the requirements.txt file +provided in the cloned repository by running:

pip install -r requirements.txt

This will install all the required packages and their dependencies.

3. Writing server code

The application.py file in the cloned repository demonstrates how to +use FastKafka to consume messages from a Kafka topic, make predictions +using a predictive model, and publish the predictions to another Kafka +topic. Here is an explanation of the code:

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call +downloads the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used both +to generate documentation and to later run the server against one of the +given kafka broker.

Next, an instance of the +FastKafka +class is initialized with the minimum required arguments:

  • kafka_brokers: a dictionary used for generating documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

4. Writing the test code

The service can be tested using the +Tester +instance which can be configured to start a Redpanda +broker for testing +purposes. The test.py file in the cloned repository contains the +following code for testing.

import pytest
from application import IrisInputData, IrisPrediction, kafka_app

from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)


@pytest.mark.asyncio
async def test():
# Start Tester app and create local Redpanda broker for testing
async with Tester(kafka_app).using_local_redpanda(
tag="v23.1.2", listener_port=9092
) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)

The +Tester +module utilizes +LocalRedpandaBroker +to start and stop a Redpanda broker for testing purposes using Docker

5. Running the tests

We can run the tests in the test.py file by executing the +following command:

pytest test.py

This will start a Redpanda broker using Docker and execute the tests. The +output of the command is:

(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest
============================== test session starts ===============================
platform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0
rootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py
plugins: asyncio-0.21.0, anyio-3.6.2
asyncio: mode=strict
collected 1 item

test.py . [100%]

=============================== 1 passed in 7.28s ================================
(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$

Running the tests with the Redpanda broker ensures that your code is +working correctly with a real Kafka-like message broker, making your +tests more reliable.

Recap

We have created an Iris classification model and encapsulated it into our +FastKafka +application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our +Tester +class with Redpanda broker which mirrors the developed app topics +for testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

+ + + + \ No newline at end of file diff --git a/docs/0.6.0/index.html b/docs/0.6.0/index.html new file mode 100644 index 0000000..6fb6894 --- /dev/null +++ b/docs/0.6.0/index.html @@ -0,0 +1,139 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.6.0

FastKafka

Effortless Kafka integration for your web services

PyPI PyPI -
+Downloads PyPI - Python
+Version

GitHub Workflow
+Status +CodeQL +Dependency
+Review

GitHub


FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.


⭐⭐⭐ Stay in touch ⭐⭐⭐

Please show your support and stay in touch by:

Your support helps us to stay in touch with you and encourages us to +continue developing and improving the library. Thank you for your +support!


🐝🐝🐝 We were busy lately 🐝🐝🐝

Activity

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install the base version of fastkafka with pip as usual:

pip install fastkafka

To install fastkafka with testing features please use:

pip install fastkafka[test]

To install fastkafka with asyncapi docs please use:

pip install fastkafka[docs]

To install fastkafka with all the features please use:

pip install fastkafka[test,docs]

Tutorial

You can start an interactive tutorial in Google Colab by clicking the +button below:

Open In Colab

Writing server code

Here is an example python script using FastKafka that takes data from a +Kafka topic, makes a prediction using a predictive model, and outputs +the prediction to another Kafka topic.

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call +downloads the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +both generating the documentation and later to run the actual server +against one of the given kafka broker.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encoded messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Testing the service

The service can be tested using the +Tester +instance which internally starts an InMemory implementation of a Kafka +broker.

The Tester will redirect your consumes and produces decorated functions +to the InMemory Kafka broker so that you can quickly test your app +without the need for a running Kafka broker and all its dependencies.

from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)

# Start Tester app and create InMemory Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created an Iris classification model and encapsulated it into our +fastkafka application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

Running the service

The service can be started using the builtin fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

To run the service, you will need a running Kafka broker on localhost as +specified in the kafka_brokers parameter above. We can start the Kafka +broker locally using the +ApacheKafkaBroker.

To use +ApacheKafkaBroker, +you need to install JRE and Kafka to your environment. To simplify this +process, fastkafka comes with a CLI command that does just that, to run +it, in your terminal execute the following:

fastkafka testing install_deps

Now we can run +ApacheKafkaBroker +that will start a Kafka broker instance for us.

from fastkafka.testing import ApacheKafkaBroker

broker = ApacheKafkaBroker(apply_nest_asyncio=True)

broker.start()
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.

'127.0.0.1:9092'

Then, we start the FastKafka service by running the following command in +the folder where the application.py file is located:

fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app

In the above command, we use --num-workers option to specify how many +workers to launch and we use --kafka-broker option to specify which +kafka broker configuration to use from earlier specified kafka_brokers

[801767]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[801765]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[801767]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[801765]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[801767]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[801767]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[801765]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[801765]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[801765]: [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata
[801765]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[801767]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[801767]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[801767]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[801765]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[801767]: [ERROR] aiokafka: Unable to update metadata from [0]
[801765]: [ERROR] aiokafka: Unable to update metadata from [0]
^C
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801765...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 801767...
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[801765]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[801767]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.

You need to interrupt the running of the cell above by selecting +Runtime->Interrupt execution on the toolbar above.

Finally, we can stop the local Kafka Broker:

broker.stop()
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 801303...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 801303 was already terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 800930...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 800930 was already terminated.
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

AsyncApi requires Node.js to be installed and we provide the following +convenience command line for it:

fastkafka docs install_deps
[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.

This will generate the asyncapi folder in the relative path where all +your documentation will be saved. You can check out the content of it +with:

ls -l asyncapi
total 8
drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 09:14 docs
drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 09:14 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find a asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
^C
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
)

Kafka_servers

The following documentation snippet are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippet are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages defined as subclasses of BaseModel are +documented as well:

Kafka_messages

License

FastKafka is licensed under the Apache License 2.0

A permissive license whose main conditions require preservation of +copyright and license notices. Contributors provide an express grant of +patent rights. Licensed works, modifications, and larger works may be +distributed under different terms and without source code.

The full text of the license can be found +here.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/CHANGELOG/index.html b/docs/0.7.0/CHANGELOG/index.html new file mode 100644 index 0000000..15009b7 --- /dev/null +++ b/docs/0.7.0/CHANGELOG/index.html @@ -0,0 +1,33 @@ + + + + + +Release notes | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Release notes

0.7.0

New Features

  • Optional description argument to consumes and produces decorator implemented (#338), thanks to @Sternakt

    • Consumes and produces decorators now have optional description argument that is used instead of function docstring in async doc generation when specified
  • FastKafka Windows OS support enabled (#326), thanks to @kumaranvpl

    • FastKafka can now run on Windows
  • FastKafka and FastAPI integration implemented (#304), thanks to @kumaranvpl

    • FastKafka can now be run alongside FastAPI
  • Batch consuming option to consumers implemented (#298), thanks to @Sternakt

    • Consumers can consume events in batches by specifying msg type of consuming function as List[YourMsgType]
  • Removed support for synchronous produce functions (#295), thanks to @kumaranvpl

  • Added default broker values and update docs (#292), thanks to @Sternakt

Bugs Squashed

  • Fix index.ipynb to be runnable in colab (#342)

  • Use cli option root_path docs generate and serve CLI commands (#341), thanks to @kumaranvpl

  • Fix incorrect asyncapi docs path on fastkafka docs serve command (#335), thanks to @Sternakt

    • Serve docs now takes app root_path argument into consideration when specified in app
  • Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps (#315)

  • Fix logs printing timestamps (#308)

  • Fix topics with dots causing failure of tester instantiation (#306), thanks to @Sternakt

    • Specified topics can now have "." in their names

0.6.0

New Features

  • Timestamps added to CLI commands (#283), thanks to @davorrunje

  • Added option to process messages concurrently (#278), thanks to @Sternakt

    • A new executor option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.
  • Add consumes and produces functions to app (#274), thanks to @Sternakt

  • Export encoders, decoders from fastkafka.encoder (#246), thanks to @kumaranvpl
  • Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. (#239)
  • UI Improvement: Post screenshots with links to the actual messages in testimonials section (#228)

Bugs Squashed

  • Batch testing fix (#280), thanks to @Sternakt

  • Tester breaks when using Batching or KafkaEvent producers (#279)

  • Consumer loop callbacks are not executing in parallel (#276)

0.5.0

New Features

  • Significant speedup of Kafka producer (#236), thanks to @Sternakt

Bugs Squashed

0.4.0

New Features

0.3.1

  • README.md file updated

0.3.0

New Features

  • Guide for fastkafka produces using partition key (#172), thanks to @Sternakt

    • Closes #161
  • Add support for Redpanda for testing and deployment (#181), thanks to @kumaranvpl

  • Remove bootstrap_servers from init and use the name of broker as an option when running/testing (#134)

  • Add a GH action file to check for broken links in the docs (#163)

  • Optimize requirements for testing and docs (#151)

  • Break requirements into base and optional for testing and dev (#124)

    • Minimize base requirements needed just for running the service.
  • Add link to example git repo into guide for building docs using actions (#81)

  • Add logging for run_in_background (#46)

  • Implement partition Key mechanism for producers (#16)

Bugs Squashed

  • Implement checks for npm installation and version (#176), thanks to @Sternakt

    • Closes #158 by checking if the npx is installed and more verbose error handling
  • Fix the helper.py link in CHANGELOG.md (#165)

  • fastkafka docs install_deps fails (#157)

    • Unexpected internal error: [Errno 2] No such file or directory: 'npx'
  • Broken links in docs (#141)

  • fastkafka run is not showing up in CLI docs (#132)

0.2.3

  • Fixed broken links on PyPi index page

0.2.2

New Features

  • Extract JDK and Kafka installation out of LocalKafkaBroker (#131)

  • PyYAML version relaxed (#119), thanks to @davorrunje

  • Replace docker based kafka with local (#68)

    • replace docker compose with a simple docker run (standard run_jupyter.sh should do)
    • replace all tests to use LocalKafkaBroker
    • update documentation

Bugs Squashed

  • Fix broken link for FastKafka docs in index notebook (#145)

  • Fix encoding issues when loading setup.py on windows OS (#135)

0.2.0

New Features

  • Replace kafka container with LocalKafkaBroker (#112)
      • Replace kafka container with LocalKafkaBroker in tests
  • Remove kafka container from tests environment
  • Fix failing tests

Bugs Squashed

  • Fix random failing in CI (#109)

0.1.3

  • version update in init.py

0.1.2

New Features

  • Git workflow action for publishing Kafka docs (#78)

Bugs Squashed

  • Include missing requirement (#110)
    • Typer is imported in this file but it is not included in settings.ini
    • Add aiohttp which is imported in this file
    • Add nbformat which is imported in _components/helpers.py
    • Add nbconvert which is imported in _components/helpers.py

0.1.1

Bugs Squashed

  • JDK install fails on Python 3.8 (#106)

0.1.0

Initial release

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/CONTRIBUTING/index.html b/docs/0.7.0/CONTRIBUTING/index.html new file mode 100644 index 0000000..dddcaff --- /dev/null +++ b/docs/0.7.0/CONTRIBUTING/index.html @@ -0,0 +1,36 @@ + + + + + +Contributing to fastkafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Contributing to fastkafka

First off, thanks for taking the time to contribute! ❤️

All types of contributions are encouraged and valued. See the Table of Contents for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉

And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:

  • Star the project
  • Tweet about it
  • Refer this project in your project's readme
  • Mention the project at local meetups and tell your friends/colleagues

Table of Contents

I Have a Question

If you want to ask a question, we assume that you have read the available Documentation.

Before you ask a question, it is best to search for existing Issues that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue.

If you then still feel the need to ask a question and need clarification, we recommend the following:

  • Contact us on Discord
  • Open an Issue
    • Provide as much context as you can about what you're running into

We will then take care of the issue as soon as possible.

I Want To Contribute

When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.

Reporting Bugs

Before Submitting a Bug Report

A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.

  • Make sure that you are using the latest version.
  • Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the documentation. If you are looking for support, you might want to check this section).
  • To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the bug tracker.
  • Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.
  • Collect information about the bug:
    • Stack trace (Traceback)
    • OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
    • Python version
    • Possibly your input and the output
    • Can you reliably reproduce the issue? And can you also reproduce it with older versions?

How Do I Submit a Good Bug Report?

We use GitHub issues to track bugs and errors. If you run into an issue with the project:

  • Open an Issue. (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)
  • Explain the behavior you would expect and the actual behavior.
  • Please provide as much context as possible and describe the reproduction steps that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
  • Provide the information you collected in the previous section.

Once it's filed:

  • The project team will label the issue accordingly.
  • A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as needs-repro. Bugs with the needs-repro tag will not be addressed until they are reproduced.
  • If the team is able to reproduce the issue, it will be marked needs-fix, as well as possibly other tags (such as critical), and the issue will be left to be implemented.

Suggesting Enhancements

This section guides you through submitting an enhancement suggestion for fastkafka, including completely new features and minor improvements to existing functionality. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.

Before Submitting an Enhancement

  • Make sure that you are using the latest version.
  • Read the documentation carefully and find out if the functionality is already covered, maybe by an individual configuration.
  • Perform a search to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
  • Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library.
  • If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on Discord

How Do I Submit a Good Enhancement Suggestion?

Enhancement suggestions are tracked as GitHub issues.

  • Use a clear and descriptive title for the issue to identify the suggestion.
  • Provide a step-by-step description of the suggested enhancement in as many details as possible.
  • Describe the current behavior and explain which behavior you expected to see instead and why. At this point you can also tell which alternatives do not work for you.
  • Explain why this enhancement would be useful to most fastkafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.

Your First Code Contribution

A great way to start contributing to FastKafka would be by solving an issue tagged with "good first issue". To find a list of issues that are tagged as "good first issue" and are suitable for newcomers, please visit the following link: Good first issues

These issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in Way of working and Before a PR, and help us make FastKafka even better!

If you have any questions or need further assistance, feel free to reach out to us. Happy coding!

Development

Prepare the dev environment

To start contributing to fastkafka, you first have to prepare the development environment.

Clone the fastkafka repository

To clone the repository, run the following command in the CLI:

git clone https://github.com/airtai/fastkafka.git

Optional: create a virtual python environment

To prevent library version clashes with your other projects, it is recommended that you create a virtual python environment for your fastkafka project by running:

python3 -m venv fastkafka-env

And to activate your virtual environment run:

source fastkafka-env/bin/activate

To learn more about virtual environments, please have a look at official python documentation

Install fastkafka

To install fastkafka, navigate to the root directory of the cloned fastkafka project and run:

pip install -e ".[dev]"

Install JRE and Kafka toolkit

To be able to run tests and use all the functionalities of fastkafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:

  1. Use our fastkafka testing install-deps CLI command which will install JRE and Kafka toolkit for you in your .local folder +OR
  2. Install JRE and Kafka manually. +To do this, please refer to JDK and JRE installation guide and Apache Kafka quickstart

Install npm

To be able to run tests you must have npm installed, because of documentation generation. To do this, you have two options:

  1. Use our fastkafka docs install_deps CLI command which will install npm for you in your .local folder +OR
  2. Install npm manually. +To do this, please refer to NPM installation guide

Install docusaurus

To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of fastkafka project.

Check if everything works

After installing fastkafka and all the necessary dependencies, run nbdev_test in the root of fastkafka project. This will take a couple of minutes as it will run all the tests on fastkafka project. If everything is setup correctly, you will get a "Success." message in your terminal, otherwise please refer to previous steps.

Way of working

The development of fastkafka is done in Jupyter notebooks. Inside the nbs directory you will find all the source code of fastkafka, this is where you will implement your changes.

The testing, cleanup and exporting of the code is being handled by nbdev, please, before starting the work on fastkafka, get familiar with it by reading nbdev documentation.

The general philosophy you should follow when writing code for fastkafka is:

  • Function should be an atomic functionality, short and concise
    • Good rule of thumb: your function should be 5-10 lines long usually
  • If there are more than 2 params, enforce keywording using *
    • E.g.: def function(param1, *, param2, param3): ...
  • Define typing of arguments and return value
    • If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected
  • After the function cell, write test cells using the assert keyword
    • Whenever you implement something you should test that functionality immediately in the cells below
  • Add Google style python docstrings when function is implemented and tested

Before a PR

After you have implemented your changes you will want to open a pull request to merge those changes into our main branch. To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of fastkafka project):

  1. Format your notebooks: nbqa black nbs
  2. Close, shutdown, and clean the metadata from your notebooks: nbdev_clean
  3. Export your code: nbdev_export
  4. Run the tests: nbdev_test
  5. Test code typing: mypy fastkafka
  6. Test code safety with bandit: bandit -r fastkafka
  7. Test code safety with semgrep: semgrep --config auto -r fastkafka

When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everything is in order, we will approve your merge.

Attribution

This guide is based on the contributing-gen. Make your own!

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/LICENSE/index.html b/docs/0.7.0/LICENSE/index.html new file mode 100644 index 0000000..d6a9596 --- /dev/null +++ b/docs/0.7.0/LICENSE/index.html @@ -0,0 +1,168 @@ + + + + + +LICENSE | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

LICENSE

Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

  1. Definitions.

    "License" shall mean the terms and conditions for use, reproduction, +and distribution as defined by Sections 1 through 9 of this document.

    "Licensor" shall mean the copyright owner or entity authorized by +the copyright owner that is granting the License.

    "Legal Entity" shall mean the union of the acting entity and all +other entities that control, are controlled by, or are under common +control with that entity. For the purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity.

    "You" (or "Your") shall mean an individual or Legal Entity +exercising permissions granted by this License.

    "Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation +source, and configuration files.

    "Object" form shall mean any form resulting from mechanical +transformation or translation of a Source form, including but +not limited to compiled object code, generated documentation, +and conversions to other media types.

    "Work" shall mean the work of authorship, whether in Source or +Object form, made available under the License, as indicated by a +copyright notice that is included in or attached to the work +(an example is provided in the Appendix below).

    "Derivative Works" shall mean any work, whether in Source or Object +form, that is based on (or derived from) the Work and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. For the purposes +of this License, Derivative Works shall not include works that remain +separable from, or merely link (or bind by name) to the interfaces of, +the Work and Derivative Works thereof.

    "Contribution" shall mean any work of authorship, including +the original version of the Work and any modifications or additions +to that Work or Derivative Works thereof, that is intentionally +submitted to Licensor for inclusion in the Work by the copyright owner +or by an individual or Legal Entity authorized to submit on behalf of +the copyright owner. For the purposes of this definition, "submitted" +means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, +and issue tracking systems that are managed by, or on behalf of, the +Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise +designated in writing by the copyright owner as "Not a Contribution."

    "Contributor" shall mean Licensor and any individual or Legal Entity +on behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work.

  2. Grant of Copyright License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the +Work and such Derivative Works in Source or Object form.

  3. Grant of Patent License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, +use, offer to sell, sell, import, and otherwise transfer the Work, +where such license applies only to those patent claims licensable +by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) +with the Work to which such Contribution(s) was submitted. If You +institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work +or a Contribution incorporated within the Work constitutes direct +or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate +as of the date such litigation is filed.

  4. Redistribution. You may reproduce and distribute copies of the +Work or Derivative Works thereof in any medium, with or without +modifications, and in Source or Object form, provided that You +meet the following conditions:

    (a) You must give any other recipients of the Work or +Derivative Works a copy of this License; and

    (b) You must cause any modified files to carry prominent notices +stating that You changed the files; and

    (c) You must retain, in the Source form of any Derivative Works +that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, +excluding those notices that do not pertain to any part of +the Derivative Works; and

    (d) If the Work includes a "NOTICE" text file as part of its +distribution, then any Derivative Works that You distribute must +include a readable copy of the attribution notices contained +within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one +of the following places: within a NOTICE text file distributed +as part of the Derivative Works; within the Source form or +documentation, if provided along with the Derivative Works; or, +within a display generated by the Derivative Works, if and +wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and +do not modify the License. You may add Your own attribution +notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided +that such additional attribution notices cannot be construed +as modifying the License.

    You may add Your own copyright statement to Your modifications and +may provide additional or different license terms and conditions +for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, +reproduction, and distribution of the Work otherwise complies with +the conditions stated in this License.

  5. Submission of Contributions. Unless You explicitly state otherwise, +any Contribution intentionally submitted for inclusion in the Work +by You to the Licensor shall be under the terms and conditions of +this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify +the terms of any separate license agreement you may have executed +with Licensor regarding such Contributions.

  6. Trademarks. This License does not grant permission to use the trade +names, trademarks, service marks, or product names of the Licensor, +except as required for reasonable and customary use in describing the +origin of the Work and reproducing the content of the NOTICE file.

  7. Disclaimer of Warranty. Unless required by applicable law or +agreed to in writing, Licensor provides the Work (and each +Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions +of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any +risks associated with Your exercise of permissions under this License.

  8. Limitation of Liability. In no event and under no legal theory, +whether in tort (including negligence), contract, or otherwise, +unless required by applicable law (such as deliberate and grossly +negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a +result of this License or out of the use or inability to use the +Work (including but not limited to damages for loss of goodwill, +work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses), even if such Contributor +has been advised of the possibility of such damages.

  9. Accepting Warranty or Additional Liability. While redistributing +the Work or Derivative Works thereof, You may choose to offer, +and charge a fee for, acceptance of support, warranty, indemnity, +or other liability obligations and/or rights consistent with this +License. However, in accepting such obligations, You may act only +on Your own behalf and on Your sole responsibility, not on behalf +of any other Contributor, and only if You agree to indemnify, +defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason +of your accepting any such warranty or additional liability.

    END OF TERMS AND CONDITIONS

    APPENDIX: How to apply the Apache License to your work.

    To apply the Apache License to your work, attach the following +boilerplate notice, with the fields enclosed by brackets "[]" +replaced with your own identifying information. (Don't include +the brackets!) The text should be enclosed in the appropriate +comment syntax for the file format. We also recommend that a +file or class name and description of purpose be included on the +same "printed page" as the copyright notice for easier +identification within third-party archives.

    Copyright [yyyy] [name of copyright owner]

    Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/EventMetadata/index.html b/docs/0.7.0/api/fastkafka/EventMetadata/index.html new file mode 100644 index 0000000..a557ed7 --- /dev/null +++ b/docs/0.7.0/api/fastkafka/EventMetadata/index.html @@ -0,0 +1,32 @@ + + + + + +EventMetadata | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

EventMetadata

fastkafka.EventMetadata

A class for encapsulating Kafka record metadata.

Parameters:

  • topic: The topic this record is received from
  • partition: The partition from which this record is received
  • offset: The position of this record in the corresponding Kafka partition
  • timestamp: The timestamp of this record
  • timestamp_type: The timestamp type of this record
  • key: The key (or None if no key is specified)
  • value: The value
  • serialized_key_size: The size of the serialized, uncompressed key in bytes
  • serialized_value_size: The size of the serialized, uncompressed value in bytes
  • headers: The headers

create_event_metadata

def create_event_metadata(record: aiokafka.structs.ConsumerRecord) -> EventMetadata

Creates an instance of EventMetadata from a ConsumerRecord.

Parameters:

  • record: The Kafka ConsumerRecord.

Returns:

  • The created EventMetadata instance.
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/KafkaEvent/index.html b/docs/0.7.0/api/fastkafka/KafkaEvent/index.html new file mode 100644 index 0000000..4f1d327 --- /dev/null +++ b/docs/0.7.0/api/fastkafka/KafkaEvent/index.html @@ -0,0 +1,32 @@ + + + + + +KafkaEvent | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

KafkaEvent

fastkafka.KafkaEvent

A generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel

Parameters:

  • message: The message contained in the Kafka event, can be of type pydantic.BaseModel.
  • key: The optional key used to identify the Kafka event.
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/encoder/AvroBase/index.html b/docs/0.7.0/api/fastkafka/encoder/AvroBase/index.html new file mode 100644 index 0000000..95ddbff --- /dev/null +++ b/docs/0.7.0/api/fastkafka/encoder/AvroBase/index.html @@ -0,0 +1,32 @@ + + + + + +AvroBase | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

AvroBase

fastkafka.encoder.AvroBase

This is a base pydantic class that will add some methods

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/encoder/avro_decoder/index.html b/docs/0.7.0/api/fastkafka/encoder/avro_decoder/index.html new file mode 100644 index 0000000..6a54932 --- /dev/null +++ b/docs/0.7.0/api/fastkafka/encoder/avro_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_decoder | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

avro_decoder

fastkafka.encoder.avro_decoder

avro_decoder

def avro_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any

Decoder to decode avro encoded messages to pydantic model instance

Parameters:

  • raw_msg: Avro encoded bytes message received from Kafka topic
  • cls: Pydantic class; This pydantic class will be used to construct instance of same class

Returns:

  • An instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/encoder/avro_encoder/index.html b/docs/0.7.0/api/fastkafka/encoder/avro_encoder/index.html new file mode 100644 index 0000000..38b2fe7 --- /dev/null +++ b/docs/0.7.0/api/fastkafka/encoder/avro_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_encoder | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

avro_encoder

fastkafka.encoder.avro_encoder

avro_encoder

def avro_encoder(msg: pydantic.main.BaseModel) -> bytes

Encoder to encode pydantic instances to avro message

Parameters:

  • msg: An instance of pydantic basemodel

Returns:

  • A bytes message which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic/index.html b/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic/index.html new file mode 100644 index 0000000..93531ec --- /dev/null +++ b/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic/index.html @@ -0,0 +1,32 @@ + + + + + +avsc_to_pydantic | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

avsc_to_pydantic

fastkafka.encoder.avsc_to_pydantic

avsc_to_pydantic

def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass

Generate pydantic model from given Avro Schema

Parameters:

  • schema: Avro schema in dictionary format

Returns:

  • Pydantic model class built from given avro schema
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/encoder/json_decoder/index.html b/docs/0.7.0/api/fastkafka/encoder/json_decoder/index.html new file mode 100644 index 0000000..0816857 --- /dev/null +++ b/docs/0.7.0/api/fastkafka/encoder/json_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_decoder | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

json_decoder

fastkafka.encoder.json_decoder

json_decoder

def json_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any

Decoder to decode json string in bytes to pydantic model instance

Parameters:

  • raw_msg: Bytes message received from Kafka topic
  • cls: Pydantic class; This pydantic class will be used to construct instance of same class

Returns:

  • An instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/encoder/json_encoder/index.html b/docs/0.7.0/api/fastkafka/encoder/json_encoder/index.html new file mode 100644 index 0000000..0166b84 --- /dev/null +++ b/docs/0.7.0/api/fastkafka/encoder/json_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_encoder | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

json_encoder

fastkafka.encoder.json_encoder

json_encoder

def json_encoder(msg: pydantic.main.BaseModel) -> bytes

Encoder to encode pydantic instances to json string

Parameters:

  • msg: An instance of pydantic basemodel

Returns:

  • Json string in bytes which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor/index.html b/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor/index.html new file mode 100644 index 0000000..98861f7 --- /dev/null +++ b/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor/index.html @@ -0,0 +1,35 @@ + + + + + +DynamicTaskExecutor | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

DynamicTaskExecutor

fastkafka.executors.DynamicTaskExecutor

A class that implements a dynamic task executor for processing consumer records.

The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality +for running tasks in parallel using asyncio.Task.

__init__

def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000, size: int = 100000) -> None

Create an instance of DynamicTaskExecutor

Parameters:

  • throw_exceptions: Flag indicating whether exceptions should be thrown or logged. +Defaults to False.
  • max_buffer_size: Maximum buffer size for the memory object stream. +Defaults to 100_000.
  • size: Size of the task pool. Defaults to 100_000.

run

def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None

Runs the dynamic task executor.

Parameters:

  • is_shutting_down_f: Function to check if the executor is shutting down.
  • generator: Generator function for retrieving consumer records.
  • processor: Processor function for processing consumer records.

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/executors/SequentialExecutor/index.html b/docs/0.7.0/api/fastkafka/executors/SequentialExecutor/index.html new file mode 100644 index 0000000..a4187fc --- /dev/null +++ b/docs/0.7.0/api/fastkafka/executors/SequentialExecutor/index.html @@ -0,0 +1,35 @@ + + + + + +SequentialExecutor | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

SequentialExecutor

fastkafka.executors.SequentialExecutor

A class that implements a sequential executor for processing consumer records.

The SequentialExecutor class extends the StreamExecutor class and provides functionality +for running processing tasks in sequence by awaiting their coroutines.

__init__

def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000) -> None

Create an instance of SequentialExecutor

Parameters:

  • throw_exceptions: Flag indicating whether exceptions should be thrown or logged. +Defaults to False.
  • max_buffer_size: Maximum buffer size for the memory object stream. +Defaults to 100_000.

run

def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None

Runs the sequential executor.

Parameters:

  • is_shutting_down_f: Function to check if the executor is shutting down.
  • generator: Generator function for retrieving consumer records.
  • processor: Processor function for processing consumer records.

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/index.html b/docs/0.7.0/api/fastkafka/index.html new file mode 100644 index 0000000..86f1d4e --- /dev/null +++ b/docs/0.7.0/api/fastkafka/index.html @@ -0,0 +1,496 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

FastKafka

fastkafka.FastKafka

__init__

def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Optional[Dict[str, Any]] = None, root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7f1c4d890f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f1c4c8a1210>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None

Creates FastKafka application

Parameters:

  • title: optional title for the documentation. If None, +the title will be set to empty string
  • description: optional description for the documentation. If +None, the description will be set to empty string
  • version: optional version for the documentation. If None, +the version will be set to empty string
  • contact: optional contact for the documentation. If None, the +contact will be set to placeholder values: +name='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'
  • kafka_brokers: dictionary describing kafka brokers used for setting +the bootstrap server when running the application and for +generating documentation. Defaults to +{ +"localhost": { +"url": "localhost", +"description": "local kafka broker", +"port": "9092", +} +}
  • root_path: path to where documentation will be created
  • lifespan: asynccontextmanager that is used for setting lifespan hooks. +aenter is called before app start and aexit after app stop. +The lifespan is called when the application is started as async context +manager, e.g.:async with kafka_app...
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT. +Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence requires acks to be set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalancing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

benchmark

def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]

Decorator to benchmark produces/consumes functions

Parameters:

  • interval: Period to use to calculate throughput. If value is of type int, +then it will be used as seconds. If value is of type timedelta, +then it will be used as it is. default: 1 - one second
  • sliding_window_size: The size of the sliding window to use to calculate +average throughput. default: None - By default average throughput is +not calculated

consumes

def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], 
typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the consumer will subscribe to and execute the +decorated function when it receives a message from the topic, +default: None. If the topic is not specified, topic name will be +inferred from the decorated function name by stripping the defined prefix
  • decoder: Decoder to use to decode messages consumed from the topic, +default: json - By default, it uses json decoder to decode +bytes to json string and then it creates instance of pydantic +BaseModel. It also accepts custom decoder function.
  • executor: Type of executor to choose for consuming tasks. Available options +are "SequentialExecutor" and "DynamicTaskExecutor". The default option is +"SequentialExecutor" which will execute the consuming tasks sequentially. +If the consuming tasks have high latency it is recommended to use +"DynamicTaskExecutor" which will wrap the consuming functions into tasks +and run them on an asyncio loop in background. This comes with a cost of +increased overhead so use it only in cases when your consume functions have +high latency such as database queries or some other type of networking.
  • prefix: Prefix stripped from the decorated function to define a topic name +if the topic argument is not passed, default: "on_". If the decorated +function name is not prefixed with the defined prefix and topic argument +is not passed, then this method will throw ValueError
  • brokers: Optional argument specifying multiple broker clusters for consuming +messages from different Kafka clusters in FastKafka.
  • description: Optional description of the consuming function async docs. +If not provided, consuming function doc attr will be used.
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • bootstrap_servers: a host[:port] string (or list of +host[:port] strings) that the consumer should contact to bootstrap +initial cluster metadata.

This does not have to be the full node list. +It just needs to have at least one broker that will respond to a +Metadata API Request. Default port is 9092. If no servers are +specified, will default to localhost:9092.

  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. Also +submitted to :class:~.consumer.group_coordinator.GroupCoordinator +for logging with respect to consumer group administration. Default: +aiokafka-{version}
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • request_timeout_ms: Client request timeout in milliseconds. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalancing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • api_version: specify which kafka API version to use. +:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more information see +:ref:ssl_auth. Default: None.
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying None will +disable idle checks. Default: 540000 (9 minutes).
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: +PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: None
  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

Returns:

  • : A function returning the same function

create_docs

def create_docs(self: fastkafka.FastKafka) -> None

Create the asyncapi documentation based on the configured consumers and producers.

This function exports the asyncapi specification based on the configured consumers +and producers in the FastKafka instance. It generates the asyncapi documentation by +extracting the topics and callbacks from the consumers and producers.

Note: +The asyncapi documentation is saved to the location specified by the _asyncapi_path +attribute of the FastKafka instance.

Returns:

  • None

create_mocks

def create_mocks(self: fastkafka.FastKafka) -> None

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

fastapi_lifespan

def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]

Method for managing the lifespan of a FastAPI application with a specific Kafka broker.

Parameters:

  • kafka_broker_name: The name of the Kafka broker to start FastKafka

Returns:

  • Lifespan function to use for initializing FastAPI

get_topics

def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]

Get all topics for both producing and consuming.

Returns:

  • A set of topics for both producing and consuming.

produces

def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7f1c4d890f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f1c4c8a1210>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the producer will send returned values from +the decorated function to, default: None- If the topic is not +specified, topic name will be inferred from the decorated function +name by stripping the defined prefix.
  • encoder: Encoder to use to encode messages before sending it to topic, +default: json - By default, it uses json encoder to convert +pydantic basemodel to json string and then encodes the string to bytes +using 'utf-8' encoding. It also accepts custom encoder function.
  • prefix: Prefix stripped from the decorated function to define a topic +name if the topic argument is not passed, default: "to_". If the +decorated function name is not prefixed with the defined prefix +and topic argument is not passed, then this method will throw ValueError
  • brokers: Optional argument specifying multiple broker clusters for consuming +messages from different Kafka clusters in FastKafka.
  • description: Optional description of the producing function async docs. +If not provided, producing function doc attr will be used.
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT. +Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence requires acks to be set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None

Returns:

  • : A function returning the same function

Exceptions:

  • ValueError: when needed

run_in_background

def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

  • A decorator function that takes a background task as an input and stores it to be run in the background.

set_kafka_broker

def set_kafka_broker(self, kafka_broker_name: str) -> None

Sets the Kafka broker to start FastKafka with

Parameters:

  • kafka_broker_name: The name of the Kafka broker to start FastKafka

Returns:

  • None

Exceptions:

  • ValueError: If the provided kafka_broker_name is not found in dictionary of kafka_brokers
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker/index.html b/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker/index.html new file mode 100644 index 0000000..bae13dd --- /dev/null +++ b/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker/index.html @@ -0,0 +1,32 @@ + + + + + +ApacheKafkaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

ApacheKafkaBroker

fastkafka.testing.ApacheKafkaBroker

ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.

__init__

def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None

Initialises the ApacheKafkaBroker object

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

get_service_config_string

def get_service_config_string(self: fastkafka.testing.ApacheKafkaBroker, service: str, data_dir: pathlib.Path) -> str

Gets the configuration string for a service.

Parameters:

  • service: Name of the service ("kafka" or "zookeeper").
  • data_dir: Path to the directory where the service will save data.

Returns:

  • The service configuration string.

start

def start(self: fastkafka.testing.ApacheKafkaBroker) -> str

Starts a local Kafka broker and ZooKeeper instance synchronously.

Returns:

  • The Kafka broker bootstrap server address in string format: host:port.

stop

def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None

Stops a local kafka broker and zookeeper instance synchronously

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker/index.html b/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker/index.html new file mode 100644 index 0000000..03c04e2 --- /dev/null +++ b/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker/index.html @@ -0,0 +1,32 @@ + + + + + +LocalRedpandaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

LocalRedpandaBroker

fastkafka.testing.LocalRedpandaBroker

LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.

__init__

def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None

Initialises the LocalRedpandaBroker object

Parameters:

  • listener_port: Port on which the clients (producers and consumers) can connect
  • tag: Tag of Redpanda image to use to start container
  • seastar_core: Core(s) to use by Seastar (the framework Redpanda uses under the hood)
  • memory: The amount of memory to make available to Redpanda
  • mode: Mode to use to load configuration properties in container
  • default_log_level: Log levels to use for Redpanda

get_service_config_string

def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str

Generates a configuration for a service

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • service: "redpanda", defines which service to get config string for

start

def start(self: fastkafka.testing.LocalRedpandaBroker) -> str

Starts a local redpanda broker instance synchronously

Returns:

  • Redpanda broker bootstrap server address in string format: address:port

stop

def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None

Stops a local redpanda broker instance synchronously

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/api/fastkafka/testing/Tester/index.html b/docs/0.7.0/api/fastkafka/testing/Tester/index.html new file mode 100644 index 0000000..e13c626 --- /dev/null +++ b/docs/0.7.0/api/fastkafka/testing/Tester/index.html @@ -0,0 +1,289 @@ + + + + + +Tester | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Tester

fastkafka.testing.Tester

__init__

def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None

Mirror-like object for testing a FastKafka application

Can be used as context manager

Parameters:

  • app: The FastKafka application to be tested.
  • broker: An optional broker to start and to use for testing.
  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

benchmark

def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]

Decorator to benchmark produces/consumes functions

Parameters:

  • interval: Period to use to calculate throughput. If value is of type int, +then it will be used as seconds. If value is of type timedelta, +then it will be used as it is. default: 1 - one second
  • sliding_window_size: The size of the sliding window to use to calculate +average throughput. default: None - By default average throughput is +not calculated

consumes

def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.0', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], 
typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the consumer will subscribe to and execute the +decorated function when it receives a message from the topic, +default: None. If the topic is not specified, topic name will be +inferred from the decorated function name by stripping the defined prefix
  • decoder: Decoder to use to decode messages consumed from the topic, +default: json - By default, it uses json decoder to decode +bytes to json string and then it creates instance of pydantic +BaseModel. It also accepts custom decoder function.
  • executor: Type of executor to choose for consuming tasks. Available options +are "SequentialExecutor" and "DynamicTaskExecutor". The default option is +"SequentialExecutor" which will execute the consuming tasks sequentially. +If the consuming tasks have high latency it is recommended to use +"DynamicTaskExecutor" which will wrap the consuming functions into tasks +and run them on an asyncio loop in the background. This comes with a cost of +increased overhead so use it only in cases when your consume functions have +high latency such as database queries or some other type of networking.
  • prefix: Prefix stripped from the decorated function to define a topic name +if the topic argument is not passed, default: "on_". If the decorated +function name is not prefixed with the defined prefix and topic argument +is not passed, then this method will throw ValueError
  • brokers: Optional argument specifying multiple broker clusters for consuming +messages from different Kafka clusters in FastKafka.
  • description: Optional description of the consuming function async docs. +If not provided, consuming function doc attr will be used.
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • bootstrap_servers: a host[:port] string (or list of +host[:port] strings) that the consumer should contact to bootstrap +initial cluster metadata.

This does not have to be the full node list. +It just needs to have at least one broker that will respond to a +Metadata API Request. Default port is 9092. If no servers are +specified, will default to localhost:9092.

  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. Also +submitted to :class:~.consumer.group_coordinator.GroupCoordinator +for logging with respect to consumer group administration. Default: +aiokafka-{version}
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • request_timeout_ms: Client request timeout in milliseconds. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalancing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • api_version: specify which kafka API version to use. +:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more information see +:ref:ssl_auth. Default: None.
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying None will +disable idle checks. Default: 540000 (9 minutes).
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: +PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: None
  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

Returns:

  • : A function returning the same function

create_docs

def create_docs(self: fastkafka.FastKafka) -> None

Create the asyncapi documentation based on the configured consumers and producers.

This function exports the asyncapi specification based on the configured consumers +and producers in the FastKafka instance. It generates the asyncapi documentation by +extracting the topics and callbacks from the consumers and producers.

Note: +The asyncapi documentation is saved to the location specified by the _asyncapi_path +attribute of the FastKafka instance.

Returns:

  • None

create_mocks

def create_mocks(self: fastkafka.FastKafka) -> None

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

fastapi_lifespan

def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]

Method for managing the lifespan of a FastAPI application with a specific Kafka broker.

Parameters:

  • kafka_broker_name: The name of the Kafka broker to start FastKafka

Returns:

  • Lifespan function to use for initializing FastAPI

get_topics

def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]

Get all topics for both producing and consuming.

Returns:

  • A set of topics for both producing and consuming.

produces

def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7f1c4d890f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f1c4c8a1210>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the producer will send returned values from +the decorated function to, default: None- If the topic is not +specified, topic name will be inferred from the decorated function +name by stripping the defined prefix.
  • encoder: Encoder to use to encode messages before sending it to topic, +default: json - By default, it uses json encoder to convert +pydantic basemodel to json string and then encodes the string to bytes +using 'utf-8' encoding. It also accepts custom encoder function.
  • prefix: Prefix stripped from the decorated function to define a topic +name if the topic argument is not passed, default: "to_". If the +decorated function name is not prefixed with the defined prefix +and topic argument is not passed, then this method will throw ValueError
  • brokers: Optional argument specifying multiple broker clusters for consuming +messages from different Kafka clusters in FastKafka.
  • description: Optional description of the producing function async docs. +If not provided, producing function doc attr will be used.
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL. Default: PLAINTEXT. +Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence acks to set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None

Returns:

  • : A function returning the same function

Exceptions:

  • ValueError: when needed

run_in_background

def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

  • A decorator function that takes a background task as an input and stores it to be run in the background.

set_kafka_broker

def set_kafka_broker(self, kafka_broker_name: str) -> None

Sets the Kafka broker to start FastKafka with

Parameters:

  • kafka_broker_name: The name of the Kafka broker to start FastKafka

Returns:

  • None

Exceptions:

  • ValueError: If the provided kafka_broker_name is not found in dictionary of kafka_brokers

using_local_kafka

def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester

Starts local Kafka broker used by the Tester instance

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

Returns:

  • An instance of tester with Kafka as broker

using_local_redpanda

def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester

Starts local Redpanda broker used by the Tester instance

Parameters:

  • listener_port: Port on which the clients (producers and consumers) can connect
  • tag: Tag of Redpanda image to use to start container
  • seastar_core: Core(s) to use by Seastar (the framework Redpanda uses under the hood)
  • memory: The amount of memory to make available to Redpanda
  • mode: Mode to use to load configuration properties in container
  • default_log_level: Log levels to use for Redpanda

Returns:

  • An instance of tester with Redpanda as broker
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/cli/fastkafka/index.html b/docs/0.7.0/cli/fastkafka/index.html new file mode 100644 index 0000000..35d8623 --- /dev/null +++ b/docs/0.7.0/cli/fastkafka/index.html @@ -0,0 +1,32 @@ + + + + + +fastkafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

fastkafka

Usage:

$ fastkafka [OPTIONS] COMMAND [ARGS]...

Options:

  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.

Commands:

  • docs: Commands for managing fastkafka app...
  • run: Runs Fast Kafka API application
  • testing: Commands for managing fastkafka testing

fastkafka docs

Commands for managing fastkafka app documentation

Usage:

$ fastkafka docs [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • generate: Generates documentation for a FastKafka...
  • install_deps: Installs dependencies for FastKafka...
  • serve: Generates and serves documentation for a...

fastkafka docs generate

Generates documentation for a FastKafka application

Usage:

$ fastkafka docs generate [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created; default is current directory
  • --help: Show this message and exit.

fastkafka docs install_deps

Installs dependencies for FastKafka documentation generation

Usage:

$ fastkafka docs install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.

fastkafka docs serve

Generates and serves documentation for a FastKafka application

Usage:

$ fastkafka docs serve [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created; default is current directory
  • --bind TEXT: Some info [default: 127.0.0.1]
  • --port INTEGER: Some info [default: 8000]
  • --help: Show this message and exit.

fastkafka run

Runs Fast Kafka API application

Usage:

$ fastkafka run [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --num-workers INTEGER: Number of FastKafka instances to run, defaults to number of CPU cores. [default: 4]
  • --kafka-broker TEXT: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [default: localhost]
  • --help: Show this message and exit.

fastkafka testing

Commands for managing fastkafka testing

Usage:

$ fastkafka testing [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • install_deps: Installs dependencies for FastKafka app...

fastkafka testing install_deps

Installs dependencies for FastKafka app testing

Usage:

$ fastkafka testing install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/cli/run_fastkafka_server_process/index.html b/docs/0.7.0/cli/run_fastkafka_server_process/index.html new file mode 100644 index 0000000..5e1e783 --- /dev/null +++ b/docs/0.7.0/cli/run_fastkafka_server_process/index.html @@ -0,0 +1,32 @@ + + + + + +run_fastkafka_server_process | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

run_fastkafka_server_process

Usage:

$ run_fastkafka_server_process [OPTIONS] APP

Arguments:

  • APP: Input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --kafka-broker TEXT: Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [required]
  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_00_FastKafka_Demo/index.html b/docs/0.7.0/guides/Guide_00_FastKafka_Demo/index.html new file mode 100644 index 0000000..d746672 --- /dev/null +++ b/docs/0.7.0/guides/Guide_00_FastKafka_Demo/index.html @@ -0,0 +1,117 @@ + + + + + +FastKafka tutorial | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

FastKafka tutorial

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install it with pip as usual:

pip install fastkafka
try:
import fastkafka
except:
! pip install fastkafka

Running in Colab

You can start this interactive tutorial in Google Colab by clicking the +button below:

Open In Colab

Writing server code

Here is an example python script using FastKafka that takes data from a +Kafka topic, makes a prediction using a predictive model, and outputs +the prediction to another Kafka topic.

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call downloads +the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +generating the documentation only and it is not being checked by the +actual server.

Next, an object of the FastKafka class is initialized with the minimum +set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Testing the service

The service can be tested using the Tester instances which internally +starts Kafka broker and zookeeper.

Before running tests, we have to install Java runtime and Apache Kafka +locally. To simplify the process, we provide the following convenience +command:

fastkafka testing install_deps
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)

# Start Tester app and create local Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created an Iris classification model and encapsulated it into our +fastkafka application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

Running the service

The service can be started using the built-in fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

# Shared state: models loaded once at startup and reused by consumers/producers.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
    # Load the ML model
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
        X, y
    )
    yield
    # Clean up the ML models and release the resources
    ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field


class IrisInputData(BaseModel):
    # Measurements of a single iris flower; all values are in centimeters.
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    species: str = Field(..., example="setosa", description="Predicted species")


from fastkafka import FastKafka

kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)


@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
    # Predict the species class for the received measurements and publish it.
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
    # Map the numeric class index to its species name and emit a prediction message.
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

To run the service, you will need a running Kafka broker on localhost as +specified in the kafka_brokers parameter above. We can start the Kafka +broker locally using the ApacheKafkaBroker. Notice that the same +happens automatically in the Tester as shown above.

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.

'127.0.0.1:9092'

Then, we start the FastKafka service by running the following command in +the folder where the application.py file is located:

fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app

In the above command, we use --num-workers option to specify how many +workers to launch and we use --kafka-broker option to specify which +kafka broker configuration to use from earlier specified kafka_brokers

[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]
[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]
^C
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...

You need to interrupt the running of the cell above by selecting Runtime->Interrupt execution on the toolbar above.

Finally, we can stop the local Kafka Broker:

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

When running in Colab, we need to update Node.js first:

We need to install all dependencies for the generator using the following command line:

fastkafka docs install_deps
[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the following command:

fastkafka docs generate application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.

This will generate the asyncapi folder in the relative path where all your documentation will be saved. You can check out the content of it with:

ls -l asyncapi
total 8
drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs
drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find a asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
^C
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the documentation below:

from fastkafka import FastKafka

# Broker metadata used both for connecting and for AsyncAPI doc generation.
kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    bootstrap_servers="localhost:9092",
)

Kafka_servers

The following documentation snippet are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippet are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages as defined as subclasses of BaseModel are +documented as well:

![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_01_Intro/index.html b/docs/0.7.0/guides/Guide_01_Intro/index.html new file mode 100644 index 0000000..0201653 --- /dev/null +++ b/docs/0.7.0/guides/Guide_01_Intro/index.html @@ -0,0 +1,51 @@ + + + + + +Intro | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Intro

This tutorial will show you how to use FastKafkaAPI, step by +step.

The goal of FastKafkaAPI is to simplify the use of Apache Kafka in +Python inspired by FastAPI look and feel.

In this Intro tutorial we’ll go through the basic requirements to run the demos presented in future steps.

Installing FastKafkaAPI

First step is to install FastKafkaAPI

$ pip install fastkafka

Preparing a Kafka broker

Next step is to prepare the Kafka environment, our consumers and +producers will need some channel of communication.

!!! info "Hey, your first info!"

If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. 

To go through the tutorial, we recommend that you use dockerized Kafka +brokers, if you have Docker and docker-compose installed the setup +should take you no time (if we exclude the container download times).

!!! warning "Listen! This is important."

To be able to setup this configuration you need to have Docker and docker-compose installed

See here for more info on <a href = \"https://docs.docker.com/\" target=\"_blank\">Docker</a> and <a href = \"https://docs.docker.com/compose/install/\" target=\"_blank\">docker compose</a>

To setup the recommended environment, first, create a new folder where you want to save your demo files (e.g. fastkafka_demo). Inside the new folder create a new YAML file named kafka_demo.yml and copy the following configuration into it:

version: "3"
services:
  zookeeper:
    image: wurstmeister/zookeeper
    hostname: zookeeper
    container_name: zookeeper
    networks:
      - fastkafka-network
    ports:
      - "2181:2181"
      - "22:22"
      - "2888:2888"
      - "3888:3888"
  kafka:
    image: wurstmeister/kafka
    container_name: kafka
    ports:
      - "9093:9093"
    environment:
      HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d' ' -f 2"
      KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093
      KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093
      KAFKA_INTER_BROKER_LISTENER_NAME: INTER
      KAFKA_CREATE_TOPICS: "hello:1:1"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    depends_on:
      - zookeeper
    healthcheck:
      test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]
      interval: 5s
      timeout: 10s
      retries: 5
    networks:
      - fastkafka-network
networks:
  fastkafka-network:
    name: "fastkafka-network"

This configuration will start a single instance of Zookeeper, single +instance of Kafka broker and create a ‘hello’ topic (quite enough for a +start). To start the configuration, run:

$ docker-compose -f kafka_demo.yml up -d --wait

This will start the necessary containers and wait till they report that +they are Healthy. After the command finishes, you are good to go to try +out the FastKafkaAPI capabilities! 🎊

Running the code

After installing FastKafkaAPI and initialising the Kafka broker you can +proceed to the ‘First Steps’ part of the tutorial. There, you will write +your first Kafka client and producer apps, run them, and interact with +them.

You are highly encouraged to follow along the tutorials not just by reading through them but by implementing the code examples in your own environment. This will not only help you remember the use cases better but also, hopefully, demonstrate to you the ease of use of this library.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_02_First_Steps/index.html b/docs/0.7.0/guides/Guide_02_First_Steps/index.html new file mode 100644 index 0000000..ca83593 --- /dev/null +++ b/docs/0.7.0/guides/Guide_02_First_Steps/index.html @@ -0,0 +1,49 @@ + + + + + +First Steps | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

First Steps

Creating a simple Kafka consumer app

For our first demo we will create the simplest possible Kafka consumer +and run it using ‘fastkafka run’ command.

The consumer will:

  1. Connect to the Kafka Broker we setup in the Intro guide

  2. Listen to the hello topic

  3. Write any message received from the hello topic to stdout

To create the consumer, first, create a file named

hello_kafka_consumer.py and copy the following code to it:

from os import environ

from fastkafka import FastKafka
from pydantic import BaseModel, Field

# Broker location comes from the environment so the same script works
# against any Kafka instance (see the "Kafka configuration" note below).
kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
    "localhost": {
        "description": "local development kafka",
        "url": kafka_server_url,
        "port": kafka_server_port
    }
}

class HelloKafkaMsg(BaseModel):
    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )

kafka_app = FastKafka(
    kafka_brokers=kafka_brokers
)

@kafka_app.consumes()
async def on_hello(msg: HelloKafkaMsg):
    # flush=True so worker output is visible in real time under `fastkafka run`.
    print(f"Got data, msg={msg.msg}", flush=True)

!!! info "Kafka configuration"

This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

!!! warning "Remember to flush"

Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.

To run this consumer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app

After running the command, you should see something similar to the output below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})
[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.

Now you can interact with your consumer, by sending the messages to the +subscribed ‘hello’ topic, don’t worry, we will cover this in the next +step of this guide.

Sending first message to your consumer

After we have created and run our first consumer, we should send a +message to it, to make sure it is working properly.

If you are using the Kafka setup as described in the Intro guide, you +can follow the steps listed here to send a message to the hello topic.

First, connect to your running kafka broker by running:

docker exec -it kafka /bin/bash

Then, when connected to the container, run:

kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello

This will open an interactive connection to the hello topic, now you can write your messages to the topic and they will be consumed by our consumer.

In the shell, type:

{"msg":"hello"}

and press enter. This will send a hello message to the topic which will be read by our running consumer and output to stdout.

Check the output of your consumer (terminal where you ran the ‘fastkafka run’ command) and confirm that your consumer has read the Kafka message. You should see something like this:

Got data, msg=hello

Creating a hello Kafka producer

Consuming messages is only a part of this Library functionality, the other big part is producing the messages. So, let’s create our first kafka producer which will send its greetings to our consumer periodically.

The producer will:

  1. Connect to the Kafka Broker we setup in the Intro guide
  2. Connect to the hello topic
  3. Periodically send a message to the hello world topic

To create the producer, first, create a file named

hello_kafka_producer.py and copy the following code to it:

from os import environ

import asyncio
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

# Broker location comes from the environment so the same script works
# against any Kafka instance (see the "Kafka configuration" note below).
kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
    "localhost": {
        "description": "local development kafka",
        "url": kafka_server_url,
        "port": kafka_server_port
    }
}

class HelloKafkaMsg(BaseModel):
    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )

kafka_app = FastKafka(
    kafka_brokers=kafka_brokers
)

logger = get_logger(__name__)

@kafka_app.produces()
async def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:
    # The returned message is published to the "hello" topic by FastKafka.
    logger.info(f"Producing: {msg}")
    return msg

@kafka_app.run_in_background()
async def hello_every_second():
    # Background task: emit one greeting per second for the app's lifetime.
    while True:
        await to_hello(HelloKafkaMsg(msg="hello"))
        await asyncio.sleep(1)

!!! info "Kafka configuration"

This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

To run this producer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app

After running the command, you should see something similar to the output below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.

Now, while the producer is running, it will send a HelloKafkaMsg every +second to the hello kafka topic. If your consumer is still running, you +should see the messages appear in its log.

Recap

In this guide we have:

  1. Created a simple Kafka consumer using FastKafka
  2. Sent a message to our consumer through Kafka
  3. Created a simple Kafka producer using FastKafka
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_03_Authentication/index.html b/docs/0.7.0/guides/Guide_03_Authentication/index.html new file mode 100644 index 0000000..3a4955d --- /dev/null +++ b/docs/0.7.0/guides/Guide_03_Authentication/index.html @@ -0,0 +1,37 @@ + + + + + +Authentication | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Authentication

TLS Authentication

sasl_mechanism (str) – Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN, +GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN

sasl_plain_username (str) – username for SASL PLAIN authentication. +Default: None

sasl_plain_password (str) – password for SASL PLAIN authentication. +Default: None

sasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token +provider instance. (See kafka.oauth.abstract). Default: None

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow/index.html b/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow/index.html new file mode 100644 index 0000000..47b025c --- /dev/null +++ b/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow/index.html @@ -0,0 +1,37 @@ + + + + + +Deploy FastKafka docs to GitHub Pages | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Deploy FastKafka docs to GitHub Pages

Getting started

Add your workflow file .github/workflows/fastkafka_docs_deploy.yml and +push it to your remote default branch.

Here is an example workflow:

name: Deploy FastKafka Generated Documentation to GitHub Pages

on:
  push:
    branches: [ "main", "master" ]
  workflow_dispatch:

jobs:
  deploy:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: airtai/workflows/fastkafka-ghp@main
        with:
          app: "test_fastkafka.application:kafka_app"

Options

Set app location

Input in the form of path:app, where path is the path to a Python +file and app is an object of type FastKafka:

- name: Deploy
uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

In the above example, FastKafka app is named as kafka_app and it is +available in the application submodule of the test_fastkafka module.

Example Repository

A FastKafka-based library that uses the above-mentioned workflow actions to publish FastKafka docs to Github Pages can be found here.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_05_Lifespan_Handler/index.html b/docs/0.7.0/guides/Guide_05_Lifespan_Handler/index.html new file mode 100644 index 0000000..ad4b2cd --- /dev/null +++ b/docs/0.7.0/guides/Guide_05_Lifespan_Handler/index.html @@ -0,0 +1,75 @@ + + + + + +Lifespan Events | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Lifespan Events

Did you know that you can define some special code that runs before and +after your Kafka application? This code will be executed just once, but +it covers the whole lifespan of your app! 🚀

Lets break it down:

You can define logic (code) that should be executed before the +application starts up. This is like a warm-up for your app, getting it +ready to consume and produce messages.

Similarly, you can define logic (code) that should be executed when the +application is shutting down. This is like a cool-down for your app, +making sure everything is properly closed and cleaned up.

By executing code before consuming and after producing, you cover the +entire lifecycle of your application 🎉

This is super handy for setting up shared resources that are needed +across consumers and producers, like a database connection pool or a +machine learning model. And the best part? You can clean up these +resources when the app is shutting down!

So lets give it a try and see how it can make your Kafka app even more +awesome! 💪

Lifespan example - Iris prediction model

Let’s dive into an example to see how you can leverage the lifecycle +handler to solve a common use case. Imagine that you have some machine +learning models that need to consume incoming messages and produce +response/prediction messages. These models are shared among consumers +and producers, which means you don’t want to load them for every +message.

Here’s where the lifecycle handler comes to the rescue! By loading the +model before the messages are consumed and produced, but only right +before the application starts receiving messages, you can ensure that +the model is ready to use without compromising the performance of your +tests. In the upcoming sections, we’ll walk you through how to +initialize an Iris species prediction model and use it in your developed +application.

Lifespan

You can define this startup and shutdown logic using the lifespan +parameter of the FastKafka app, and an async context manager.

Let’s start with an example and then see it in detail.

We create an async function lifespan() with yield like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from fastkafka import FastKafka

# Shared storage for models loaded by the lifespan handler.
ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
    # Load the ML model
    print("Loading the model!")
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
    yield
    # Clean up the ML models and release the resources

    print("Exiting, clearing model dict!")
    ml_models.clear()

The first thing to notice, is that we are defining an async function +with yield. This is very similar to Dependencies with yield.

The first part of the function, before the yield, will be executed +before the application starts. And the part after the yield will +be executed after the application has finished.

This lifespan will create an iris_prediction model on application +startup and cleanup the references after the app is shutdown.

The lifespan will be passed an KafkaApp reference on startup of your +application, which you can use to reference your application on startup.

For demonstration sake, we also added prints so that when running the +app we can see that our lifespan was called.

Async context manager

Context managers can be used in with blocks, our lifespan, for example +could be used like this:

ml_models = {}
async with lifespan(None):
print(ml_models)

When you create a context manager or an async context manager, what it +does is that, before entering the with block, it will execute the code +before the yield, and after exiting the with block, it will execute +the code after the yield.

If you want to learn more about context managers and contextlib +decorators, please visit Python official +docs

App demo

FastKafka app

Lets now create our application using the created lifespan handler.

Notice how we passed our lifespan handler to the app when constructing it through the lifespan argument.

from fastkafka import FastKafka

kafka_brokers = {
    "localhost": {
        "url": "<url_of_your_kafka_bootstrap_server>",
        "description": "local development kafka broker",
        "port": "<port_of_your_kafka_bootstrap_server>",
    },
}

# Pass the lifespan handler so model setup/teardown runs with the app.
kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)

Data modeling

Lets model the Iris data for our app:

from pydantic import BaseModel, Field, NonNegativeFloat

class IrisInputData(BaseModel):
    # Measurements of a single iris flower; all values are in centimeters.
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    species: str = Field(..., example="setosa", description="Predicted species")

Consumers and producers

Let's create a consumer and producer for our app that will generate predictions from input iris data.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Final app

The final app looks like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")
ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Running the app

Now we can run the app with our custom lifespan handler. Copy the code above into lifespan_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app

When you run the app, you should see an output similar to the one below:

[262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[262292]: Loading the model!
[262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished
[262292]: Exiting, clearing model dict!
[INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.

Recap

In this guide we have defined a lifespan handler and passed it to our FastKafka app.

Some important points are:

  1. Lifespan handler is implemented as +AsyncContextManager
  2. Code before yield in lifespan will be executed before +application startup
  3. Code after yield in lifespan will be executed after +application shutdown
  4. You can pass your lifespan handler to FastKafka app on +initialisation by passing a lifespan argument
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka/index.html b/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka/index.html new file mode 100644 index 0000000..8196b80 --- /dev/null +++ b/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka/index.html @@ -0,0 +1,67 @@ + + + + + +Benchmarking FastKafka app | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Benchmarking FastKafka app

Prerequisites

To benchmark a FastKafka project, you will need the following:

  1. A library built with FastKafka.
  2. A running Kafka instance to benchmark the FastKafka application +against.

Creating FastKafka Code

Let’s create a FastKafka-based application and write it to the application.py file based on the tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

FastKafka has a decorator for benchmarking which is appropriately +called as benchmark. Let’s edit our application.py file and add the +benchmark decorator to the consumes method.

# content of the "application.py" file with benchmark

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
@kafka_app.benchmark(interval=1, sliding_window_size=5)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Here we are conducting a benchmark of a function that consumes data from +the input_data topic with an interval of 1 second and a sliding window +size of 5.

This benchmark method uses the interval parameter to calculate the +results over a specific time period, and the sliding_window_size +parameter to determine the maximum number of results to use in +calculating the average throughput and standard deviation.

This benchmark is important to ensure that the function is performing +optimally and to identify any areas for improvement.

Starting Kafka

If you already have a Kafka running somewhere, then you can skip this +step.

Please keep in mind that your benchmarking results may be affected by +bottlenecks such as network, CPU cores in the Kafka machine, or even the +Kafka configuration itself.

Installing Java and Kafka

We need a working Kafka instance to benchmark our FastKafka app, and to run Kafka we need Java. Thankfully, FastKafka comes with a CLI to install both Java and Kafka on our machine.

So, let’s install Java and Kafka by executing the following command.

fastkafka testing install_deps

The above command will extract Kafka scripts at the location “$HOME/.local/kafka_2.13-3.3.2" on your machine.

Creating configuration for Zookeeper and Kafka

Now we need to start Zookeeper and Kafka separately, and to start +them we need zookeeper.properties and kafka.properties files.

Let’s create a folder inside the folder where Kafka scripts were +extracted and change directory into it.

mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir

Let’s create a file called zookeeper.properties and write the +following content to the file:

dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper
clientPort=2181
maxClientCnxns=0

Similarly, let’s create a file called kafka.properties and write the +following content to the file:

broker.id=0
listeners=PLAINTEXT://:9092

num.network.threads=3
num.io.threads=8
socket.send.buffer.bytes=102400
socket.receive.buffer.bytes=102400
socket.request.max.bytes=104857600

num.partitions=1
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1

log.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs
log.flush.interval.messages=10000
log.flush.interval.ms=1000
log.retention.hours=168
log.retention.bytes=1073741824
log.segment.bytes=1073741824
log.retention.check.interval.ms=300000

zookeeper.connect=localhost:2181
zookeeper.connection.timeout.ms=18000

Starting Zookeeper and Kafka

We need two different terminals to run Zookeeper in one and Kafka in +another. Let’s open a new terminal and run the following commands to +start Zookeeper:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./zookeeper-server-start.sh ../data_dir/zookeeper.properties

Once Zookeeper is up and running, open a new terminal and execute the following commands to start Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-server-start.sh ../data_dir/kafka.properties

Now we have both Zookeeper and Kafka up and running.

Creating topics in Kafka

In a new terminal, please execute the following command to create +necessary topics in Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092
./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092

Populating topics with dummy data

To benchmark our FastKafka app, we need some data in Kafka topics.

In the same terminal, let’s create some dummy data:

yes '{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}' | head -n 1000000 > /tmp/test_data

This command will create a file called test_data in the tmp folder +with one million rows of text. This will act as dummy data to populate +the input_data topic.

Let’s populate the created topic input_data with the dummy data which +we created above:

./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data

Now our topic input_data has one million records/messages in it. If +you want more messages in topic, you can simply execute the above +command again and again.

Benchmarking FastKafka

Once Zookeeper and Kafka are ready, benchmarking FastKafka app is +as simple as running the fastkafka run command:

fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app

This command will start the FastKafka app and begin consuming messages +from Kafka, which we spun up earlier. Additionally, the same command +will output all of the benchmark throughputs based on the interval and +sliding_window_size values.

The output for the fastkafka run command is:

[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh
ost:9092', 'max_poll_records': 100}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition
=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)

Based on the output, when using 1 worker, our FastKafka app achieved a +throughput of 93k messages per second and an average throughput of +93k messages per second.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html b/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html new file mode 100644 index 0000000..61692a4 --- /dev/null +++ b/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html @@ -0,0 +1,137 @@ + + + + + +Encoding and Decoding Kafka Messages with FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Encoding and Decoding Kafka Messages with FastKafka

Prerequisites

  1. A basic knowledge of FastKafka is needed to proceed with this +guide. If you are not familiar with FastKafka, please go through +the tutorial first.
  2. FastKafka with its dependencies installed is needed. Please +install FastKafka using the command - pip install fastkafka

Ways to Encode and Decode Messages with FastKafka

In Python, by default, we send Kafka messages as bytes. Even if our message is a string, we convert it to bytes and then send it to a Kafka topic. Similarly, while consuming messages, we consume them as bytes and then convert them to strings.

In FastKafka, we specify message schema using Pydantic models as +mentioned in tutorial:

# Define Pydantic models for Kafka messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Then, we send and receive messages as instances of Pydantic models which +we defined. So, FastKafka needs a way to encode/decode to these Pydantic +model messages to bytes in order to send/receive messages to/from Kafka +topics.

The @consumes and @produces methods of FastKafka accept a parameter +called decoder/encoder to decode/encode Kafka messages. FastKafka +provides three ways to encode and decode messages:

  1. json - This is the default encoder/decoder option in FastKafka. +While producing, this option converts our instance of Pydantic model +messages to a JSON string and then converts it to bytes before +sending it to the topic. While consuming, it converts bytes to a +JSON string and then constructs an instance of Pydantic model from +the JSON string.
  2. avro - This option uses Avro encoding/decoding to convert instances +of Pydantic model messages to bytes while producing, and while +consuming, it constructs an instance of Pydantic model from bytes.
  3. custom encoder/decoder - If you are not happy with the json or avro +encoder/decoder options, you can write your own encoder/decoder +functions and use them to encode/decode Pydantic messages.

1. Json encoder and decoder

The default option in FastKafka is json encoder/decoder. This option, +while producing, converts our instance of pydantic model messages to +json string and then converts to bytes before sending it to the topics. +While consuming it converts bytes to json string and then constructs +instance of pydantic model from json string.

We can use the application from tutorial as +is, and it will use the json encoder/decoder by default. But, for +clarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder +parameter:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder="json")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="json")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

In the above code, the @kafka_app.consumes decorator sets up a +consumer for the “input_data" topic, using the ‘json’ decoder to convert +the message payload to an instance of IrisInputData. The +@kafka_app.produces decorator sets up a producer for the “predictions" +topic, using the ‘json’ encoder to convert the instance of +IrisPrediction to message payload.

2. Avro encoder and decoder

What is Avro?

Avro is a row-oriented remote procedure call and data serialization +framework developed within Apache’s Hadoop project. It uses JSON for +defining data types and protocols, and serializes data in a compact +binary format. To learn more about the Apache Avro, please check out the +docs.

Installing FastKafka with Avro dependencies

FastKafka with dependencies for Apache Avro installed is needed to use +avro encoder/decoder. Please install FastKafka with Avro support using +the command - pip install fastkafka[avro]

Defining Avro Schema Using Pydantic Models

By default, you can use Pydantic model to define your message schemas. +FastKafka internally takes care of encoding and decoding avro messages, +based on the Pydantic models.

So, similar to the tutorial, the message schema will +remain as it is.

# Define Pydantic models for Avro messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

No need to change anything to support avro. You can use existing +Pydantic models as is.

Reusing existing avro schema

If you are using some other library to send and receive avro encoded +messages, it is highly likely that you already have an Avro schema +defined.

Building pydantic models from avro schema dictionary

Let’s modify the above example and let’s assume we have schemas already +for IrisInputData and IrisPrediction which will look like below:

iris_input_data_schema = {
"type": "record",
"namespace": "IrisInputData",
"name": "IrisInputData",
"fields": [
{"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
{"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
{"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
{"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
],
}
iris_prediction_schema = {
"type": "record",
"namespace": "IrisPrediction",
"name": "IrisPrediction",
"fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}

We can easily construct pydantic models from avro schema using +avsc_to_pydantic function which is included as part of FastKafka +itself.

from fastkafka.encoder import avsc_to_pydantic

IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.__fields__)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.__fields__)

The above code will convert avro schema to pydantic models and will +print pydantic models’ fields. The output of the above is:

{'sepal_length': ModelField(name='sepal_length', type=float, required=True),
'sepal_width': ModelField(name='sepal_width', type=float, required=True),
'petal_length': ModelField(name='petal_length', type=float, required=True),
'petal_width': ModelField(name='petal_width', type=float, required=True)}

{'species': ModelField(name='species', type=str, required=True)}

This is exactly the same as manually defining the pydantic models ourselves. You don't have to worry about making any mistakes while converting avro schema to pydantic models manually. You can easily and automatically accomplish it by using the avsc_to_pydantic function as demonstrated above.

Building pydantic models from .avsc file

Not all cases will have avro schema conveniently defined as a python +dictionary. You may have it stored as the proprietary .avsc files in +filesystem. Let’s see how to convert those .avsc files to pydantic +models.

Let’s assume our avro files are stored in files called +iris_input_data_schema.avsc and iris_prediction_schema.avsc. In that +case, following code converts the schema to pydantic models:

import json
from fastkafka.encoder import avsc_to_pydantic


with open("iris_input_data_schema.avsc", "rb") as f:
iris_input_data_schema = json.load(f)

with open("iris_prediction_schema.avsc", "rb") as f:
iris_prediction_schema = json.load(f)


IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.__fields__)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.__fields__)

Consume/Produce avro messages with FastKafka

FastKafka provides @consumes and @produces methods to +consume/produces messages to/from a Kafka topic. This is explained in +tutorial.

The @consumes and @produces methods accept a parameter called decoder/encoder to decode/encode avro messages.

@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

In the above example, in @consumes and @produces methods, we +explicitly instruct FastKafka to decode and encode messages using +the avro decoder/encoder instead of the default json +decoder/encoder.

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use avro to decode and +encode messages:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


iris_input_data_schema = {
"type": "record",
"namespace": "IrisInputData",
"name": "IrisInputData",
"fields": [
{"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
{"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
{"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
{"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
],
}
iris_prediction_schema = {
"type": "record",
"namespace": "IrisPrediction",
"name": "IrisPrediction",
"fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}
# Or load schema from avsc files

from fastkafka.encoder import avsc_to_pydantic

IrisInputData = avsc_to_pydantic(iris_input_data_schema)
IrisPrediction = avsc_to_pydantic(iris_prediction_schema)


from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

The above code is a sample implementation of using FastKafka to consume +and produce Avro-encoded messages from/to a Kafka topic. The code +defines two Avro schemas for the input data and the prediction result. +It then uses the avsc_to_pydantic function from the FastKafka library +to convert the Avro schema into Pydantic models, which will be used to +decode and encode Avro messages.

The FastKafka class is then instantiated with the broker details, and +two functions decorated with @kafka_app.consumes and +@kafka_app.produces are defined to consume messages from the +"input_data" topic and produce messages to the "predictions" topic, +respectively. The functions use the decoder="avro" and encoder="avro" +parameters to decode and encode the Avro messages.

In summary, the above code demonstrates a straightforward way to use +Avro-encoded messages with FastKafka to build a message processing +pipeline.

3. Custom encoder and decoder

If you are not happy with the json or avro encoder/decoder options, you +can write your own encoder/decoder functions and use them to +encode/decode Pydantic messages.

Writing a custom encoder and decoder

In this section, let’s see how to write a custom encoder and decoder +which obfuscates kafka message with simple +ROT13 cipher.

import codecs
import json
from typing import Any

from pydantic.main import ModelMetaclass


def custom_encoder(msg: BaseModel) -> bytes:
msg_str = msg.json()
obfuscated = codecs.encode(msg_str, 'rot13')
raw_bytes = obfuscated.encode("utf-8")
return raw_bytes

def custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:
obfuscated = raw_msg.decode("utf-8")
msg_str = codecs.decode(obfuscated, 'rot13')
msg_dict = json.loads(msg_str)
return cls(**msg_dict)

The above code defines two custom functions for encoding and decoding +messages in a Kafka application using the FastKafka library.

The encoding function, custom_encoder(), takes a message msg which +is an instance of a Pydantic model, converts it to a JSON string using +the json() method, obfuscates the resulting string using the ROT13 +algorithm from the codecs module, and finally encodes the obfuscated +string as raw bytes using the UTF-8 encoding.

The decoding function, custom_decoder(), takes a raw message raw_msg +in bytes format, a Pydantic class to construct instance with cls +parameter. It first decodes the raw message from UTF-8 encoding, then +uses the ROT13 algorithm to de-obfuscate the string. Finally, it loads +the resulting JSON string using the json.loads() method and returns a +new instance of the specified cls class initialized with the decoded +dictionary.

These functions can be used with FastKafka’s encoder and decoder +parameters to customize the serialization and deserialization of +messages in Kafka topics.

Let’s test the above code

i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

encoded = custom_encoder(i)
display(encoded)

decoded = custom_decoder(encoded, IrisInputData)
display(decoded)

This will result in following output

b'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}'

IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use our custom decoder and +encoder functions:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")


import codecs
import json
from typing import Any

from pydantic.main import ModelMetaclass


def custom_encoder(msg: BaseModel) -> bytes:
msg_str = msg.json()
obfuscated = codecs.encode(msg_str, 'rot13')
raw_bytes = obfuscated.encode("utf-8")
return raw_bytes

def custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:
obfuscated = raw_msg.decode("utf-8")
msg_str = codecs.decode(obfuscated, 'rot13')
msg_dict = json.loads(msg_str)
return cls(**msg_dict)


from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder=custom_decoder)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder=custom_encoder)
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

This code defines a custom encoder and decoder functions for encoding +and decoding messages sent through a Kafka messaging system.

The custom encoder function takes a message represented as a +BaseModel and encodes it as bytes by first converting it to a JSON +string and then obfuscating it using the ROT13 encoding. The obfuscated +message is then converted to bytes using UTF-8 encoding and returned.

The custom decoder function takes in the bytes representing an +obfuscated message, decodes it using UTF-8 encoding, then decodes the +ROT13 obfuscation, and finally loads it as a dictionary using the json +module. This dictionary is then converted to a BaseModel instance +using the cls parameter.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_11_Consumes_Basics/index.html b/docs/0.7.0/guides/Guide_11_Consumes_Basics/index.html new file mode 100644 index 0000000..e7c95ba --- /dev/null +++ b/docs/0.7.0/guides/Guide_11_Consumes_Basics/index.html @@ -0,0 +1,87 @@ + + + + + +@consumes basics | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

@consumes basics

You can use @consumes decorator to consume messages from Kafka topics.

In this guide we will create a simple FastKafka app that will consume +HelloWorld messages from hello_world topic.

Import FastKafka

To use the @consumes decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

In this demo we will log the messages to the output so that we can +inspect and verify that our app is consuming properly. For that we need +to import the logger.

from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

Define the structure of the messages

Next, you need to define the structure of the messages you want to +consume from the topic using pydantic. For +the guide we’ll stick to something basic, but you are free to define any +complex message structure you wish in your project, just make sure it +can be JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a consumer function and decorate it with @consumes

Let’s create a consumer function that will consume HelloWorld messages +from hello_world topic and log them.

@app.consumes()
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

The function decorated with the @consumes decorator will be called +when a message is produced to Kafka.

The message will then be injected into the typed msg argument of the +function and its type will be used to parse the message.

In this example case, when the message is sent into a hello_world +topic, it will be parsed into a HelloWorld class and on_hello_world +function will be called with the parsed class as msg argument value.

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)


kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)
@app.consumes()
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Run the app

Now we can run the app. Copy the code above in consumer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

After running the command, you should see this output in your terminal:

[513863]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[513863]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[513863]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[513863]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 513863...
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 513863 terminated.

Send the message to kafka topic

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo {\"msg\": \"Hello world\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the “Got msg: msg='Hello world'" being logged by your +consumer.

Choosing a topic

You probably noticed that you didn’t define which topic you are +receiving the message from, this is because the @consumes decorator +determines the topic by default from your function name. The decorator +will take your function name and strip the default “on_" prefix from it +and use the rest as the topic name. In this example case, the topic is +hello_world.

You can choose your custom prefix by defining the prefix parameter in +consumes decorator, like this:

Also, you can define the topic name completely by defining the topic +in parameter in consumes decorator, like this:

Message data

The message received from kafka is translated from binary JSON +representation into the class defined by the typing of the msg parameter in the +function decorated by the @consumes decorator.

In this example case, the message will be parsed into a HelloWorld +class.

Message metadata

If you need any of Kafka message metadata such as timestamp, partition +or headers you can access the metadata by adding a EventMetadata typed +argument to your consumes function and the metadata from the incoming +message will be automatically injected when calling the consumes +function.

Let’s demonstrate that.

Create a consumer function with metadata

The only difference from the original basic consume function is that we +are now passing the meta: EventMetadata argument to the function. The +@consumes decorator will register that and, when a message is +consumed, it will also pass the metadata to your function. Now you can +use the metadata in your consume function. Lets log it to see what it +contains.

First, we need to import the EventMetadata

Now we can add the meta argument to our consuming function.

Your final app should look like this:

Now lets run the app and send a message to the broker to see the logged +message metadata.

You should see a similar log as the one below and the metadata being +logged in your app.

As you can see in the log, from the metadata you now have the +information about the partition, offset, timestamp, key and headers. +🎉

Dealing with high latency consuming functions

If your functions have high latency due to, for example, lengthy +database calls you will notice a big decrease in performance. This is +due to the way the consumes decorator executes your consume +functions when consuming events. By default, the consume function will +run the consuming functions for one topic sequentially, this is the most +straightforward approach and results in the least amount of overhead.

But, to handle those high latency tasks and run them in parallel, +FastKafka has a DynamicTaskExecutor prepared for your consumers. This +executor comes with additional overhead, so use it only when you need to +handle high latency functions.

Lets demonstrate how to use it.

@app.consumes(executor="DynamicTaskExecutor")
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo {\"msg\": \"Hello world\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the “Got msg: msg='Hello world'" being logged by your +consumer.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_12_Batch_Consuming/index.html b/docs/0.7.0/guides/Guide_12_Batch_Consuming/index.html new file mode 100644 index 0000000..7ebbaa0 --- /dev/null +++ b/docs/0.7.0/guides/Guide_12_Batch_Consuming/index.html @@ -0,0 +1,47 @@ + + + + + +Batch consuming | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Batch consuming

If you want to consume data in batches @consumes decorator makes that +possible for you. By typing a consumed msg object as a list of +messages the consumer will call your consuming function with a batch of +messages consumed from a single partition. Let’s demonstrate that now.

Consume function with batching

To consume messages in batches, you need to wrap your message type into a +list and the @consumes decorator will take care of the rest for you. +Your consumes function will be called with batches grouped by partition +now.

@app.consumes(auto_offset_reset="earliest")
async def on_hello_world(msg: List[HelloWorld]):
logger.info(f"Got msg batch: {msg}")

App example

We will modify the app example from @consumes +basics guide to consume +HelloWorld messages batch. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


import asyncio
from typing import List
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.consumes(auto_offset_reset="earliest")
async def on_hello_world(msg: List[HelloWorld]):
logger.info(f"Got msg batch: {msg}")

Send the messages to kafka topic

Lets send a couple of HelloWorld messages to the hello_world topic +and check if our consumer kafka application has logged the received +messages batch. In your terminal, run the following command at least two +times to create multiple messages in your kafka queue:

echo {\"msg\": \"Hello world\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

Now we can run the app. Copy the code of the example app in +consumer_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

You should see your Kafka messages being logged in batches by your +consumer.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_21_Produces_Basics/index.html b/docs/0.7.0/guides/Guide_21_Produces_Basics/index.html new file mode 100644 index 0000000..fad6adf --- /dev/null +++ b/docs/0.7.0/guides/Guide_21_Produces_Basics/index.html @@ -0,0 +1,62 @@ + + + + + +@produces basics | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

@produces basics

You can use @produces decorator to produce messages to Kafka topics.

In this guide we will create a simple FastKafka app that will produce +hello world messages to hello_world topic.

Import FastKafka

To use the @produces decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

Define the structure of the messages

Next, you need to define the structure of the messages you want to send +to the topic using pydantic. For the guide +we’ll stick to something basic, but you are free to define any complex +message structure you wish in your project, just make sure it can be +JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a producer function and decorate it with @produces

Let’s create a producer function that will produce HelloWorld messages +to hello_world topic:


@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Now you can call your defined function as any normal python function in +your code. The side effect of calling the function will be that the +value you are returning will also be sent to a kafka topic.

By default, the topic is determined from your function name, the "to_" +prefix is stripped and what is left over is used as a topic name. In this +case, that is hello_world.

Instruct the app to start sending HelloWorld messages

Let’s use @run_in_background decorator to instruct our app to send +HelloWorld messages to hello_world topic every second.


import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)


kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app


Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.

Check if the message was sent to the Kafka topic

Lets check the topic and see if there is a “Hello world!" message in the +hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the {“msg": “Hello world!"} messages in your topic.

Choosing a topic

You probably noticed that you didn’t define which topic you are sending +the message to, this is because the @produces decorator determines the +topic by default from your function name. The decorator will take your +function name and strip the default “to_" prefix from it and use the +rest as the topic name. In this example case, the topic is +hello_world.

!!! warning "New topics"

Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.

You can choose your custom prefix by defining the prefix parameter in +produces decorator, like this:


@app.produces(prefix="send_to_")
async def send_to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Also, you can define the topic name completely by defining the topic +in parameter in produces decorator, like this:


@app.produces(topic="my_special_topic")
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Message data

The return value from your function will be translated to a JSON string and +then to bytes and sent to the defined Kafka topic. The typing of the return +value is used for generating the documentation for your Kafka app.

In this example case, the return value is the HelloWorld class which will be +translated into a JSON formatted string and then to bytes. The translated +data will then be sent to Kafka, in the form of: +b'{"msg": "Hello world!"}'

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_22_Partition_Keys/index.html b/docs/0.7.0/guides/Guide_22_Partition_Keys/index.html new file mode 100644 index 0000000..7c1189a --- /dev/null +++ b/docs/0.7.0/guides/Guide_22_Partition_Keys/index.html @@ -0,0 +1,53 @@ + + + + + +Defining a partition key | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Defining a partition key

Partition keys are used in Apache Kafka to determine which partition a +message should be written to. This ensures that related messages are +kept together in the same partition, which can be useful for ensuring +order or for grouping related messages together for efficient +processing. Additionally, partitioning data across multiple partitions +allows Kafka to distribute load across multiple brokers and scale +horizontally, while replicating data across multiple brokers provides +fault tolerance.

You can define your partition keys when using the @produces decorator, +this guide will demonstrate to you this feature.

Return a key from the producing function

To define a key for the message that you want to produce to Kafka topic, +you need to wrap the response into KafkaEvent class and set the key +value. Check the example below:


from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

In the example, we want to return the HelloWorld message class with +the key defined as my_key. So, we wrap the message and key into a +KafkaEvent class and return it as such.

While generating the documentation, the KafkaEvent class will be +unwrapped and the HelloWorld class will be documented in the +definition of message type, same way if you didn’t use the key.

!!! info "Which key to choose?"

Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.

App example

We will modify the app example from the @produces basics guide to return +the HelloWorld with our key. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

Now we can run the app. Copy the code above into producer_with_key_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app

After running the command, you should see this output in your terminal:

[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.

Check if the message was sent to the Kafka topic with the desired key

Let's check the topic and see if there is a "Hello world!" message in the hello_world topic with the defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the my_key {"msg": "Hello world!"} messages appearing in your topic; the my_key part of the message is the key that we defined in our producing function.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_23_Batch_Producing/index.html b/docs/0.7.0/guides/Guide_23_Batch_Producing/index.html new file mode 100644 index 0000000..f9fd3e8 --- /dev/null +++ b/docs/0.7.0/guides/Guide_23_Batch_Producing/index.html @@ -0,0 +1,52 @@ + + + + + +Batch producing | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Batch producing

If you want to send your data in batches @produces decorator makes +that possible for you. By returning a list of messages you want to +send in a batch the producer will collect the messages and send them in +a batch to a Kafka broker.

This guide will demonstrate how to use this feature.

Return a batch from the producing function

To define a batch that you want to produce to Kafka topic, you need to +return the List of the messages that you want to be batched from your +producing function.


from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

In the example, we want to return the HelloWorld message class batch +that is created from a list of msgs we passed into our producing +function.

Let's also prepare a background task that will send a batch of "hello world" messages when the app starts.


@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

App example

We will modify the app example from @producer +basics guide to return the +HelloWorld batch. The final app will look like this (make sure you +replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

Run the app

Now we can run the app. Copy the code above into producer_with_key_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app

After running the command, you should see this output in your terminal:

[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task
[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.

Check if the batch was sent to the Kafka topic with the defined key

Let's check the topic and see if there are "Hello world" messages in the hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages in your topic.

Batch key

To define a key for your batch, like in the Defining a partition key guide, you can wrap the returned value in a KafkaEvent class. To learn more about defining a partition key and the KafkaEvent class, please have a look at the Defining a partition key guide.

Let’s demonstrate that.

To define a key, we just need to modify our producing function, like +this:


from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Now our app looks like this:


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Check if the batch was sent to the Kafka topic

Let's check the topic and see if there are "Hello world" messages in the hello_world topic, containing a defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages with the defined key in your topic.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html b/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html new file mode 100644 index 0000000..e32489a --- /dev/null +++ b/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html @@ -0,0 +1,155 @@ + + + + + +Using multiple Kafka clusters | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Using multiple Kafka clusters

Ready to take your FastKafka app to the next level? This guide shows you +how to connect to multiple Kafka clusters effortlessly. Consolidate +topics and produce messages across clusters like a pro. Unleash the full +potential of your Kafka-powered app with FastKafka. Let’s dive in and +elevate your application’s capabilities!

Test message

To showcase the functionalities of FastKafka and illustrate the concepts +discussed, we can use a simple test message called TestMsg. Here’s the +definition of the TestMsg class:

class TestMsg(BaseModel):
msg: str = Field(...)

Defining multiple broker configurations

When building a FastKafka application, you may need to consume messages +from multiple Kafka clusters, each with its own set of broker +configurations. FastKafka provides the flexibility to define different +broker clusters using the brokers argument in the consumes decorator. +Let’s explore an example code snippet

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(
development=dict(url="dev.server_1", port=9092),
production=dict(url="prod.server_1", port=9092),
)
kafka_brokers_2 = dict(
development=dict(url="dev.server_2", port=9092),
production=dict(url="prod.server_1", port=9092),
)

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_1(msg: TestMsg):
print(f"Received on s1: {msg=}")
await to_predictions_1(msg)


@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def on_preprocessed_signals_2(msg: TestMsg):
print(f"Received on s2: {msg=}")
await to_predictions_2(msg)

@app.produces(topic="predictions")
async def to_predictions_1(msg: TestMsg) -> TestMsg:
return msg

@app.produces(topic="predictions", brokers=kafka_brokers_2)
async def to_predictions_2(msg: TestMsg) -> TestMsg:
return msg

In this example, the application has two consumes endpoints, both of +which will consume events from preprocessed_signals topic. +on_preprocessed_signals_1 will consume events from kafka_brokers_1 +configuration and on_preprocessed_signals_2 will consume events from +kafka_brokers_2 configuration. When producing, to_predictions_1 will +produce to predictions topic on kafka_brokers_1 cluster and +to_predictions_2 will produce to predictions topic on +kafka_brokers_2 cluster.

How it works

The kafka_brokers_1 configuration represents the primary cluster, +while kafka_brokers_2 serves as an alternative cluster specified in +the decorator.

Using the FastKafka class, the app object is initialized with the +primary broker configuration (kafka_brokers_1). By default, the +@app.consumes decorator without the brokers argument consumes messages +from the preprocessed_signals topic on kafka_brokers_1.

To consume messages from a different cluster, the @app.consumes +decorator includes the brokers argument. This allows explicit +specification of the broker cluster in the on_preprocessed_signals_2 +function, enabling consumption from the same topic but using the +kafka_brokers_2 configuration.

The brokers argument can also be used in the @app.produces decorator to +define multiple broker clusters for message production.

It’s important to ensure that all broker configurations have the same +required settings as the primary cluster to ensure consistent behavior.

Testing the application

To test our FastKafka 'mirroring' application, we can use our testing framework. Let's take a look at how it's done:

from fastkafka.testing import Tester

async with Tester(app) as tester:
# Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from
await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal_s1"))
# Assert on_preprocessed_signals_1 consumed sent message
await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(
TestMsg(msg="signal_s1"), timeout=5
)
# Assert app has produced a prediction
await tester.mirrors[app.to_predictions_1].assert_called_with(
TestMsg(msg="signal_s1"), timeout=5
)

# Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from
await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal_s2"))
# Assert on_preprocessed_signals_2 consumed sent message
await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(
TestMsg(msg="signal_s2"), timeout=5
)
# Assert app has produced a prediction
await tester.mirrors[app.to_predictions_2].assert_called_with(
TestMsg(msg="signal_s2"), timeout=5
)
23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-05-30 10:33:08.721 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-05-30 10:33:08.721 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'
23-05-30 10:33:08.722 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:08.722 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'
23-05-30 10:33:08.723 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:08.741 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'
23-05-30 10:33:08.741 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:08.742 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'
23-05-30 10:33:08.743 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:08.744 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:08.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}
23-05-30 10:33:08.746 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:08.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:08.749 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}
23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:08.755 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:08.756 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:08.756 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}
23-05-30 10:33:08.758 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:08.758 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:33:08.759 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:08.760 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:08.761 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}
23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:08.762 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:08.763 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:33:08.763 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
Received on s1: msg=TestMsg(msg='signal_s1')
Received on s2: msg=TestMsg(msg='signal_s2')
23-05-30 10:33:13.745 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:13.747 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:13.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:13.748 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:13.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:13.751 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
23-05-30 10:33:13.754 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

The usage of the tester.mirrors dictionary allows specifying the +desired topic/broker combination for sending the test messages, +especially when working with multiple Kafka clusters. This ensures that +the data is sent to the appropriate topic/broker based on the consuming +function, and consumed from appropriate topic/broker based on the +producing function.

Running the application

You can run your application using fastkafka run CLI command in the +same way that you would run a single cluster app.

To start your app, copy the code above in multi_cluster_example.py and +run it by running:

Now we can run the app. Copy the code above in multi_cluster_example.py, +adjust your server configurations, and run it by running

fastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app

In your app logs, you should see your app starting up and your two +consumer functions connecting to different kafka clusters.

[90735]: 23-05-30 10:33:29.699 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:57647'}
[90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})
[90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}
[90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})
[90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}
[90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[90735]: 23-05-30 10:33:29.722 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}.
[90735]: 23-05-30 10:33:29.723 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}.
Starting process cleanup, this may take a few seconds...
23-05-30 10:33:33.548 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 90735...
[90735]: 23-05-30 10:33:34.666 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:34.777 [INFO] fastkafka._server: terminate_asyncio_process(): Process 90735 terminated.

Application documentation

At the moment, the documentation for multi-cluster apps is not yet implemented, but it is under development and you can expect it soon!

Examples on how to use multiple broker configurations

Example #1

In this section, we’ll explore how you can effectively forward topics +between different Kafka clusters, enabling seamless data synchronization +for your applications.

Imagine having two Kafka clusters, namely kafka_brokers_1 and +kafka_brokers_2, each hosting its own set of topics and messages. Now, +if you want to forward a specific topic (in this case: +preprocessed_signals) from kafka_brokers_1 to kafka_brokers_2, +FastKafka provides an elegant solution.

Let’s examine the code snippet that configures our application for topic +forwarding:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_original(msg: TestMsg):
await to_preprocessed_signals_forward(msg)


@app.produces(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:
return data

Here’s how it works: our FastKafka application is configured to consume +messages from kafka_brokers_1 and process them in the +on_preprocessed_signals_original function. We want to forward these +messages to kafka_brokers_2. To achieve this, we define the +to_preprocessed_signals_forward function as a producer, seamlessly +producing the processed messages to the preprocessed_signals topic +within the kafka_brokers_2 cluster.

Testing

To test our FastKafka forwarding application, we can use our testing +framework. Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg="signal"))
await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)
23-05-30 10:33:40.969 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
23-05-30 10:33:40.970 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-05-30 10:33:40.971 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-05-30 10:33:40.972 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-05-30 10:33:40.972 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:40.982 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:33:40.982 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:40.983 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:40.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:33:40.984 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:40.985 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:40.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:40.986 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:40.987 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-05-30 10:33:40.989 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:40.989 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:40.991 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:44.983 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:44.986 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:44.987 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
23-05-30 10:33:44.988 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

With the help of the Tester object, we can simulate and verify the +behavior of our FastKafka application. Here’s how it works:

  1. We create an instance of the Tester by passing in our app +object, which represents our FastKafka application.

  2. Using the tester.mirrors dictionary, we can send a message to a +specific Kafka broker and topic combination. In this case, we use +tester.mirrors[app.on_preprocessed_signals_original] to send a +TestMsg message with the content “signal" to the appropriate Kafka +broker and topic.

  3. After sending the message, we can perform assertions on the mirrored +function using +tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5). +This assertion ensures that the mirrored function has been called +within a specified timeout period (in this case, 5 seconds).

Example #2

In this section, we’ll explore how you can effortlessly consume data +from multiple sources, process it, and aggregate the results into a +single topic on a specific cluster.

Imagine you have two Kafka clusters: kafka_brokers_1 and +kafka_brokers_2, each hosting its own set of topics and messages. +Now, what if you want to consume data from both clusters, perform some +processing, and produce the results to a single topic on +kafka_brokers_1? FastKafka has got you covered!

Let’s take a look at the code snippet that configures our application +for aggregating multiple clusters:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_1(msg: TestMsg):
print(f"Default: {msg=}")
await to_predictions(msg)


@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def on_preprocessed_signals_2(msg: TestMsg):
print(f"Specified: {msg=}")
await to_predictions(msg)


@app.produces(topic="predictions")
async def to_predictions(prediction: TestMsg) -> TestMsg:
print(f"Sending prediction: {prediction}")
return [prediction]

Here’s the idea: our FastKafka application is set to consume messages +from the topic “preprocessed_signals" on kafka_brokers_1 cluster, as +well as from the same topic on kafka_brokers_2 cluster. We have two +consuming functions, on_preprocessed_signals_1 and +on_preprocessed_signals_2, that handle the messages from their +respective clusters. These functions perform any required processing, in +this case, just calling the to_predictions function.

The exciting part is that the to_predictions function acts as a +producer, sending the processed results to the “predictions" topic on +kafka_brokers_1 cluster. By doing so, we effectively aggregate the +data from multiple sources into a single topic on a specific cluster.

This approach enables you to consume data from multiple Kafka clusters, +process it, and produce the aggregated results to a designated topic. +Whether you’re generating predictions, performing aggregations, or any +other form of data processing, FastKafka empowers you to harness the +full potential of multiple clusters.

Testing

Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal"))
await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal"))
await tester.on_predictions.assert_called(timeout=5)
23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-05-30 10:33:50.828 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-05-30 10:33:50.829 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:33:50.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:50.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:33:50.875 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:50.876 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-05-30 10:33:50.876 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:50.877 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:50.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:50.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:50.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:50.880 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-05-30 10:33:50.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:50.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:50.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:50.883 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:50.884 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:33:50.885 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:50.885 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:33:50.886 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
Default: msg=TestMsg(msg='signal')
Sending prediction: msg='signal'
Specified: msg=TestMsg(msg='signal')
Sending prediction: msg='signal'
23-05-30 10:33:54.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:54.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:54.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:54.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:54.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:54.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:54.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:54.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:54.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Here’s how the code above works:

  1. Within an async with block, create an instance of the Tester by +passing in your app object, representing your FastKafka application.

  2. Using the tester.mirrors dictionary, you can send messages to +specific Kafka broker and topic combinations. In this case, we use +tester.mirrors[app.on_preprocessed_signals_1] and +tester.mirrors[app.on_preprocessed_signals_2] to send TestMsg +messages with the content “signal" to the corresponding Kafka broker +and topic combinations.

  3. After sending the messages, you can perform assertions on the +on_predictions function using +tester.on_predictions.assert_called(timeout=5). This assertion +ensures that the on_predictions function has been called within a +specified timeout period (in this case, 5 seconds).

Example #3

In some scenarios, you may need to produce messages to multiple Kafka +clusters simultaneously. FastKafka simplifies this process by allowing +you to configure your application to produce messages to multiple +clusters effortlessly. Let’s explore how you can achieve this:

Consider the following code snippet that demonstrates producing messages +to multiple clusters:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals(msg: TestMsg):
print(f"{msg=}")
await to_predictions_1(TestMsg(msg="prediction"))
await to_predictions_2(TestMsg(msg="prediction"))


@app.produces(topic="predictions")
async def to_predictions_1(prediction: TestMsg) -> TestMsg:
print(f"Sending prediction to s1: {prediction}")
return [prediction]


@app.produces(topic="predictions", brokers=kafka_brokers_2)
async def to_predictions_2(prediction: TestMsg) -> TestMsg:
print(f"Sending prediction to s2: {prediction}")
return [prediction]

Here’s what you need to know about producing to multiple clusters:

  1. We define two Kafka broker configurations: kafka_brokers_1 and +kafka_brokers_2, representing different clusters with their +respective connection details.

  2. We create an instance of the FastKafka application, specifying +kafka_brokers_1 as the primary cluster for producing messages.

  3. The on_preprocessed_signals function serves as a consumer, +handling incoming messages from the “preprocessed_signals" topic. +Within this function, we invoke two producer functions: +to_predictions_1 and to_predictions_2.

  4. The to_predictions_1 function sends predictions to the +“predictions" topic on kafka_brokers_1 cluster.

  5. Additionally, the to_predictions_2 function sends the same +predictions to the “predictions" topic on kafka_brokers_2 cluster. +This allows for producing the same data to multiple clusters +simultaneously.

By utilizing this approach, you can seamlessly produce messages to +multiple Kafka clusters, enabling you to distribute data across +different environments or leverage the strengths of various clusters.

Feel free to customize the producer functions as per your requirements, +performing any necessary data transformations or enrichment before +sending the predictions.

With FastKafka, producing to multiple clusters becomes a breeze, +empowering you to harness the capabilities of multiple environments +effortlessly.

Testing

Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.to_preprocessed_signals(TestMsg(msg="signal"))
await tester.mirrors[to_predictions_1].assert_called(timeout=5)
await tester.mirrors[to_predictions_2].assert_called(timeout=5)
23-05-30 10:34:00.033 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
23-05-30 10:34:00.034 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-05-30 10:34:00.035 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-05-30 10:34:00.036 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:34:00.037 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:34:00.038 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-05-30 10:34:00.038 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:34:00.052 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:34:00.053 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:34:00.054 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:34:00.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:34:00.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:34:00.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:34:00.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:34:00.057 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:34:00.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:34:00.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:34:00.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:34:00.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:34:00.062 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:34:00.062 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-05-30 10:34:00.064 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:34:00.064 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:34:00.065 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
msg=TestMsg(msg='signal')
Sending prediction to s1: msg='prediction'
Sending prediction to s2: msg='prediction'
23-05-30 10:34:04.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:34:04.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:34:04.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:34:04.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:34:04.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:34:04.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:34:04.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Here’s how you can perform the necessary tests:

  1. Within an async with block, create an instance of the Tester by +passing in your app object, representing your FastKafka application.

  2. Using the tester.to_preprocessed_signals method, you can send a +TestMsg message with the content “signal".

  3. After sending the message, you can perform assertions on the +to_predictions_1 and to_predictions_2 functions using +tester.mirrors[to_predictions_1].assert_called(timeout=5) and +tester.mirrors[to_predictions_2].assert_called(timeout=5). These +assertions ensure that the respective producer functions have +produced data to their respective topic/broker combinations.

By employing this testing approach, you can verify that the producing +functions correctly send messages to their respective clusters. The +testing framework provided by FastKafka enables you to ensure the +accuracy and reliability of your application’s producing logic.

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html b/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html new file mode 100644 index 0000000..26424f5 --- /dev/null +++ b/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html @@ -0,0 +1,69 @@ + + + + + +Deploying FastKafka using Docker | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Deploying FastKafka using Docker

Building a Docker Image

To build a Docker image for a FastKafka project, we need the following +items:

  1. A library that is built using FastKafka.
  2. A file in which the requirements are specified. This could be a +requirements.txt file, a setup.py file, or even a wheel file.
  3. A Dockerfile to build an image that will include the two files +mentioned above.

Creating FastKafka Code

Let’s create a FastKafka-based application and write it to the +application.py file based on the tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Creating requirements.txt file

The above code only requires fastkafka. So, we will add only +fastkafka to the requirements.txt file, but you can add additional +requirements to it as well.

fastkafka>=0.3.0

Here we are using requirements.txt to store the project’s +dependencies. However, other methods like setup.py, pipenv, and +wheel files can also be used. setup.py is commonly used for +packaging and distributing Python modules, while pipenv is a tool used +for managing virtual environments and package dependencies. wheel +files are built distributions of Python packages that can be installed +with pip.

Creating Dockerfile

# (1)
FROM python:3.9-slim-bullseye
# (2)
WORKDIR /project
# (3)
COPY application.py requirements.txt /project/
# (4)
RUN pip install --no-cache-dir --upgrade -r /project/requirements.txt
# (5)
CMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]
  1. Start from the official Python base image.

  2. Set the current working directory to /project.

    This is where we’ll put the requirements.txt file and the +application.py file.

  3. Copy the application.py file and requirements.txt file inside +the /project directory.

  4. Install the package dependencies in the requirements file.

    The --no-cache-dir option tells pip to not save the downloaded +packages locally, as that is only if pip was going to be run again +to install the same packages, but that’s not the case when working +with containers.

    The --upgrade option tells pip to upgrade the packages if they +are already installed.

  5. Set the command to run the fastkafka run command.

    CMD takes a list of strings, each of these strings is what you +would type in the command line separated by spaces.

    This command will be run from the current working directory, the +same /project directory you set above with WORKDIR /project.

    We supply additional parameters --num-workers and --kafka-broker +for the run command. Finally, we specify the location of our +fastkafka application location as a command argument.

    To learn more about fastkafka run command please check the CLI +docs.

Build the Docker Image

Now that all the files are in place, let’s build the container image.

  1. Go to the project directory (where your Dockerfile is, containing +your application.py file).

  2. Run the following command to build the image:

    docker build -t fastkafka_project_image .

    This command will create a docker image with the name +fastkafka_project_image and the latest tag.

That’s it! You have now built a docker image for your FastKafka project.

Start the Docker Container

Run a container based on the built image:

docker run -d --name fastkafka_project_container fastkafka_project_image

Additional Security

Trivy is an open-source tool that scans Docker images for +vulnerabilities. It can be integrated into your CI/CD pipeline to ensure +that your images are secure and free from known vulnerabilities. Here’s +how you can use trivy to scan your fastkafka_project_image:

  1. Install trivy on your local machine by following the instructions +provided in the official trivy +documentation.

  2. Run the following command to scan your fastkafka_project_image:

    trivy image fastkafka_project_image

    This command will scan your fastkafka_project_image for any +vulnerabilities and provide you with a report of its findings.

  3. Fix any vulnerabilities identified by trivy. You can do this by +updating the vulnerable package to a more secure version or by using +a different package altogether.

  4. Rebuild your fastkafka_project_image and repeat steps 2 and 3 +until trivy reports no vulnerabilities.

By using trivy to scan your Docker images, you can ensure that your +containers are secure and free from known vulnerabilities.

Example repo

A FastKafka based library which uses above mentioned Dockerfile to +build a docker image can be found +here

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html b/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html new file mode 100644 index 0000000..14a8441 --- /dev/null +++ b/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html @@ -0,0 +1,134 @@ + + + + + +Using Redpanda to test FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Using Redpanda to test FastKafka

What is FastKafka?

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

What is Redpanda?

Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools +work out of the box with Redpanda.

From redpanda.com:

Redpanda is a Kafka®-compatible streaming data platform that is proven +to be 10x faster and 6x lower in total costs. It is also JVM-free, +ZooKeeper®-free, Jepsen-tested and source available.

Some of the advantages of Redpanda over Kafka are

  1. A single binary with built-in everything, no ZooKeeper® or JVM +needed.
  2. Costs up to 6X less than Kafka.
  3. Up to 10x lower average latencies and up to 6x faster Kafka +transactions without compromising correctness.

To learn more about Redpanda, please visit their +website or checkout this blog +post +comparing Redpanda and Kafka’s performance benchmarks.

Example repo

A sample fastkafka-based library that uses Redpanda for testing, based +on this guide, can be found +here.

The process

Here are the steps we’ll be walking through to build our example:

  1. Set up the prerequisites.
  2. Clone the example repo.
  3. Explain how to write an application using FastKafka.
  4. Explain how to write a test case to test FastKafka with Redpanda.
  5. Run the test case and produce/consume messages.

1. Prerequisites

Before starting, make sure you have the following prerequisites set up:

  1. Python 3.x: A Python 3.x installation is required to run +FastKafka. You can download the latest version of Python from the +official website. You’ll also +need to have pip installed and updated, which is Python’s package +installer.
  2. Docker Desktop: Docker is used to run Redpanda, which is +required for testing FastKafka. You can download and install Docker +Desktop from the official +website.
  3. Git: You’ll need to have Git installed to clone the example +repo. You can download Git from the official +website.

2. Cloning and setting up the example repo

To get started with the example code, clone the GitHub +repository by +running the following command in your terminal:

git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git
cd sample_fastkafka_with_redpanda

This will create a new directory called sample_fastkafka_with_redpanda +and download all the necessary files.

Create a virtual environment

Before writing any code, let’s create a new virtual +environment +for our project.

A virtual environment is an isolated environment for a Python project, +which allows you to manage project-specific dependencies and avoid +conflicts between different projects.

To create a new virtual environment, run the following commands in your +terminal:

python3 -m venv venv

This will create a new directory called venv in your project +directory, which will contain the virtual environment.

To activate the virtual environment, run the following command:

source venv/bin/activate

This will change your shell’s prompt to indicate that you are now +working inside the virtual environment.

Finally, run the following command to upgrade pip, the Python package +installer:

pip install --upgrade pip

Install Python dependencies

Next, let’s install the required Python dependencies. In this guide, +we’ll be using FastKafka to write our application code and pytest +and pytest-asyncio to test it.

You can install the dependencies from the requirements.txt file +provided in the cloned repository by running:

pip install -r requirements.txt

This will install all the required packages and their dependencies.

3. Writing server code

The application.py file in the cloned repository demonstrates how to +use FastKafka to consume messages from a Kafka topic, make predictions +using a predictive model, and publish the predictions to another Kafka +topic. Here is an explanation of the code:

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call +downloads the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used both +to generate documentation and to later run the server against one of the +given kafka broker.

Next, an instance of the FastKafka class is initialized with the +minimum required arguments:

  • kafka_brokers: a dictionary used for generating documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON-encoded messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

4. Writing the test code

The service can be tested using the Tester instance which can be +configured to start a Redpanda +broker for testing +purposes. The test.py file in the cloned repository contains the +following code for testing.

import pytest
from application import IrisInputData, IrisPrediction, kafka_app

from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)


@pytest.mark.asyncio
async def test():
# Start Tester app and create local Redpanda broker for testing
async with Tester(kafka_app).using_local_redpanda(
tag="v23.1.2", listener_port=9092
) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)

The Tester module utilizes LocalRedpandaBroker to start and +stop a Redpanda broker for testing purposes using Docker.

5. Running the tests

We can run the tests in the test.py file by executing the +following command:

pytest test.py

This will start a Redpanda broker using Docker and execute the tests. The +output of the command is:

(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest
============================== test session starts ===============================
platform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0
rootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py
plugins: asyncio-0.21.0, anyio-3.6.2
asyncio: mode=strict
collected 1 item

test.py . [100%]

=============================== 1 passed in 7.28s ================================
(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$

Running the tests with the Redpanda broker ensures that your code is +working correctly with a real Kafka-like message broker, making your +tests more reliable.

Recap

We have created an Iris classification model and encapsulated it into our +FastKafka application. The app will consume the IrisInputData from +the input_data topic and produce the predictions to predictions +topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class with Redpanda broker which mirrors the +developed app topics for testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

+ + + + \ No newline at end of file diff --git a/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html b/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html new file mode 100644 index 0000000..ec927bf --- /dev/null +++ b/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html @@ -0,0 +1,68 @@ + + + + + +Using FastAPI to Run FastKafka Application | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

Using FastAPI to Run FastKafka Application

When deploying a FastKafka application, the default approach is to +utilize the fastkafka run CLI +command. This command allows you to launch your FastKafka application as +a standalone service. However, if you already have a FastAPI application +in place and wish to run FastKafka application alongside it, you have an +alternative option.

FastKafka provides a method called FastKafka.fastapi_lifespan that +leverages FastAPI’s +lifespan +feature. This method allows you to run your FastKafka application +together with your existing FastAPI app, seamlessly integrating their +functionalities. By using the FastKafka.fastapi_lifespan method, you +can start the FastKafka application within the same process as the +FastAPI app.

The FastKafka.fastapi_lifespan method ensures that both FastAPI and +FastKafka are initialized and start working simultaneously. This +approach enables the execution of Kafka-related tasks, such as producing +and consuming messages, while also handling HTTP requests through +FastAPI’s routes.

By combining FastAPI and FastKafka in this manner, you can build a +comprehensive application that harnesses the power of both frameworks. +Whether you require real-time messaging capabilities or traditional HTTP +endpoints, this approach allows you to leverage the strengths of FastAPI +and FastKafka within a single deployment setup.

Prerequisites

  1. A basic knowledge of FastKafka is needed to proceed with this +guide. If you are not familiar with FastKafka, please go through +the tutorial first.
  2. FastKafka and FastAPI libraries need to be installed.

This guide will provide a step-by-step explanation, taking you through +each stage individually, before combining all the components in the +final section for a comprehensive understanding of the process.

1. Basic FastKafka app

In this step, we will begin by creating a simple FastKafka application.

from pydantic import BaseModel, Field, NonNegativeFloat
from typing import *

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Greetings",
kafka_brokers=kafka_brokers,
)


class TestMsg(BaseModel):
msg: str = Field(...)


@kafka_app.consumes()
async def on_names(msg: TestMsg):
await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))


@kafka_app.produces()
async def to_greetings(greeting: TestMsg) -> TestMsg:
return greeting

In the above example, we consume messages from a topic called names, +we prepend “Hello" to the message, and send it back to another topic +called greetings.

We now have a simple FastKafka app to produce and consume from two +topics.

2. Using fastapi_lifespan method

In this step of the guide, we will explore the integration of a +FastKafka application with a FastAPI application using the +FastKafka.fastapi_lifespan method. The FastKafka.fastapi_lifespan +method is a feature provided by FastKafka, which allows you to +seamlessly integrate a FastKafka application with a FastAPI application +by leveraging FastAPI’s lifespan feature.

from fastapi import FastAPI

fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name="localhost"))


@fastapi_app.get("/hello")
async def hello():
return {"msg": "hello there"}

In the above example, a new instance of the FastAPI app is created, +and when the app is started using uvicorn, it also runs the FastKafka +application concurrently.

Putting it all together

Let’s put the above code together and write it in a file called +fast_apps.py.

# content of the "fast_apps.py" file

from pydantic import BaseModel, Field, NonNegativeFloat
from typing import *

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Greetings",
kafka_brokers=kafka_brokers,
)


class TestMsg(BaseModel):
msg: str = Field(...)


@kafka_app.consumes()
async def on_names(msg: TestMsg):
await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))


@kafka_app.produces()
async def to_greetings(greeting: TestMsg) -> TestMsg:
return greeting


from fastapi import FastAPI

fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan("localhost"))

@fastapi_app.get("/hello")
async def hello():
return {"msg": "hello there"}

Finally, you can run the FastAPI application using a web server of your +choice, such as Uvicorn or Hypercorn by running the below command:

uvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080
+ + + + \ No newline at end of file diff --git a/docs/0.7.0/index.html b/docs/0.7.0/index.html new file mode 100644 index 0000000..2a5cf68 --- /dev/null +++ b/docs/0.7.0/index.html @@ -0,0 +1,118 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.0

FastKafka

Effortless Kafka integration for your web services

PyPI PyPI -
+Downloads PyPI - Python
+Version

GitHub Workflow
+Status +CodeQL +Dependency
+Review

GitHub


FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.


⭐⭐⭐ Stay in touch ⭐⭐⭐

Please show your support and stay in touch by:

Your support helps us to stay in touch with you and encourages us to +continue developing and improving the library. Thank you for your +support!


🐝🐝🐝 We were busy lately 🐝🐝🐝

Activity

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install the base version of fastkafka with pip as usual:

pip install fastkafka

To install fastkafka with testing features please use:

pip install fastkafka[test]

To install fastkafka with asyncapi docs please use:

pip install fastkafka[docs]

To install fastkafka with all the features please use:

pip install fastkafka[test,docs]

Tutorial

You can start an interactive tutorial in Google Colab by clicking the +button below:

Open in Colab

Writing server code

To demonstrate FastKafka simplicity of using @produces and @consumes +decorators, we will focus on a simple app.

The app will consume JSONs containing positive floats from one topic, log +them and then produce incremented values to another topic.

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines one Data message class. This class will model the +consumed and produced data in our app demo; it contains one +NonNegativeFloat field data that will be logged and “processed" +before being produced to another topic.

This message class will be used to parse and validate incoming data in +Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class Data(BaseModel):
data: NonNegativeFloat = Field(
..., example=0.5, description="Float data example"
)

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +both generating the documentation and later to run the actual server +against one of the given Kafka brokers.

Next, an object of the FastKafka class is initialized with the minimum +set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation

We will also import and create a logger so that we can log the incoming +data in our consuming function.

from logging import getLogger
from fastkafka import FastKafka

logger = getLogger("Demo Kafka app")

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON-encoded messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the Data message class. Specifying the type of the +single argument is instructing the Pydantic to use Data.parse_raw() +on the consumed message before passing it to the user defined function +on_input_data.

  • The @produces decorator is applied to the to_output_data function, +which specifies that this function should produce a message to the +“output_data" Kafka topic whenever it is called. The to_output_data +function takes a single float argument data. It increments the +data and returns it wrapped in a Data object. The framework will call +the Data.json().encode("utf-8") function on the returned value and +produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: Data):
logger.info(f"Got data: {msg.data}")
await to_output_data(msg.data)


@kafka_app.produces(topic="output_data")
async def to_output_data(data: float) -> Data:
processed_data = Data(data=data+1.0)
return processed_data

Testing the service

The service can be tested using the Tester instance, which internally +starts an InMemory implementation of a Kafka broker.

The Tester will redirect your consumes and produces decorated functions +to the InMemory Kafka broker so that you can quickly test your app +without the need for a running Kafka broker and all its dependencies.

from fastkafka.testing import Tester

msg = Data(
data=0.1,
)

# Start Tester app and create InMemory Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send Data message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with incremented data in output_data topic
await tester.awaited_mocks.on_output_data.assert_awaited_with(
Data(data=1.1), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] Demo Kafka app: Got data: 0.1
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created a simple fastkafka application. The app will consume the +Data from the input_data topic, log it and produce the incremented +data to output_data topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent Data message to input_data topic

  4. Asserted and checked that the developed service has reacted to Data +message

Running the service

The service can be started using the built-in fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class Data(BaseModel):
data: NonNegativeFloat = Field(
..., example=0.5, description="Float data example"
)

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: Data):
logger.info(f"Got data: {msg.data}")
await to_output_data(msg.data)


@kafka_app.produces(topic="output_data")
async def to_output_data(data: float) -> Data:
processed_data = Data(data=data+1.0)
return processed_data

To run the service, use the FastKafka CLI command and pass the module +(in this case, the file where the app implementation is located) and the +app symbol to the command.

fastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app

After running the command, you should see the following output in your +command line:

[1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1506]: 23-05-31 11:36:46.042 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata
[1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)
[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]
[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)
[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]
[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...
23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.
23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

AsyncApi requires Node.js to be installed and we provide the following +convenience command line for it:

fastkafka docs install_deps
23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.
23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'
23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /content/asyncapi/docs.

This will generate the asyncapi folder in the relative path where all your +documentation will be saved. You can check out the content of it with:

ls -l asyncapi
total 8
drwxr-xr-x 4 root root 4096 May 31 11:38 docs
drwxr-xr-x 2 root root 4096 May 31 11:38 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find a asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'
23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /content/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
127.0.0.1 - - [31/May/2023 11:39:14] "GET / HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/global.min.css HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /js/asyncapi-ui.min.js HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/asyncapi.min.css HTTP/1.1" 200 -
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

Kafka_servers

The following documentation snippet is for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippet is for the producer as specified in +the code above:

Kafka_producer

Finally, all messages as defined as subclasses of BaseModel are +documented as well:

Kafka_messages

License

FastKafka is licensed under the Apache License 2.0

A permissive license whose main conditions require preservation of +copyright and license notices. Contributors provide an express grant of +patent rights. Licensed works, modifications, and larger works may be +distributed under different terms and without source code.

The full text of the license can be found +here.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/CHANGELOG/index.html b/docs/0.7.1/CHANGELOG/index.html new file mode 100644 index 0000000..e5a337a --- /dev/null +++ b/docs/0.7.1/CHANGELOG/index.html @@ -0,0 +1,33 @@ + + + + + +Release notes | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Release notes

0.7.0

New Features

  • Optional description argument to consumes and produces decorator implemented (#338), thanks to @Sternakt

    • Consumes and produces decorators now have optional description argument that is used instead of function docstring in async doc generation when specified
  • FastKafka Windows OS support enabled (#326), thanks to @kumaranvpl

    • FastKafka can now run on Windows
  • FastKafka and FastAPI integration implemented (#304), thanks to @kumaranvpl

    • FastKafka can now be run alongside FastAPI
  • Batch consuming option to consumers implemented (#298), thanks to @Sternakt

    • Consumers can consume events in batches by specifying msg type of consuming function as List[YourMsgType]
  • Removed support for synchronous produce functions (#295), thanks to @kumaranvpl

  • Added default broker values and update docs (#292), thanks to @Sternakt

Bugs Squashed

  • Fix index.ipynb to be runnable in colab (#342)

  • Use cli option root_path docs generate and serve CLI commands (#341), thanks to @kumaranvpl

  • Fix incorrect asyncapi docs path on fastkafka docs serve command (#335), thanks to @Sternakt

    • Serve docs now takes app root_path argument into consideration when specified in app
  • Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps (#315)

  • Fix logs printing timestamps (#308)

  • Fix topics with dots causing failure of tester instantiation (#306), thanks to @Sternakt

    • Specified topics can now have "." in their names

0.6.0

New Features

  • Timestamps added to CLI commands (#283), thanks to @davorrunje

  • Added option to process messages concurrently (#278), thanks to @Sternakt

    • A new executor option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.
  • Add consumes and produces functions to app (#274), thanks to @Sternakt

  • Export encoders, decoders from fastkafka.encoder (#246), thanks to @kumaranvpl
  • Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. (#239)
  • UI Improvement: Post screenshots with links to the actual messages in testimonials section (#228)

Bugs Squashed

  • Batch testing fix (#280), thanks to @Sternakt

  • Tester breaks when using Batching or KafkaEvent producers (#279)

  • Consumer loop callbacks are not executing in parallel (#276)

0.5.0

New Features

  • Significant speedup of Kafka producer (#236), thanks to @Sternakt

Bugs Squashed

0.4.0

New Features

0.3.1

  • README.md file updated

0.3.0

New Features

  • Guide for fastkafka produces using partition key (#172), thanks to @Sternakt

    • Closes #161
  • Add support for Redpanda for testing and deployment (#181), thanks to @kumaranvpl

  • Remove bootstrap_servers from init and use the name of broker as an option when running/testing (#134)

  • Add a GH action file to check for broken links in the docs (#163)

  • Optimize requirements for testing and docs (#151)

  • Break requirements into base and optional for testing and dev (#124)

    • Minimize base requirements needed just for running the service.
  • Add link to example git repo into guide for building docs using actions (#81)

  • Add logging for run_in_background (#46)

  • Implement partition Key mechanism for producers (#16)

Bugs Squashed

  • Implement checks for npm installation and version (#176), thanks to @Sternakt

    • Closes #158 by checking if the npx is installed and more verbose error handling
  • Fix the helper.py link in CHANGELOG.md (#165)

  • fastkafka docs install_deps fails (#157)

    • Unexpected internal error: [Errno 2] No such file or directory: 'npx'
  • Broken links in docs (#141)

  • fastkafka run is not showing up in CLI docs (#132)

0.2.3

  • Fixed broken links on PyPi index page

0.2.2

New Features

  • Extract JDK and Kafka installation out of LocalKafkaBroker (#131)

  • PyYAML version relaxed (#119), thanks to @davorrunje

  • Replace docker based kafka with local (#68)

    • replace docker compose with a simple docker run (standard run_jupyter.sh should do)
    • replace all tests to use LocalKafkaBroker
    • update documentation

Bugs Squashed

  • Fix broken link for FastKafka docs in index notebook (#145)

  • Fix encoding issues when loading setup.py on windows OS (#135)

0.2.0

New Features

  • Replace kafka container with LocalKafkaBroker (#112)
      • Replace kafka container with LocalKafkaBroker in tests
  • Remove kafka container from tests environment
  • Fix failing tests

Bugs Squashed

  • Fix random failing in CI (#109)

0.1.3

  • version update in init.py

0.1.2

New Features

  • Git workflow action for publishing Kafka docs (#78)

Bugs Squashed

  • Include missing requirement (#110)
    • Typer is imported in this file but it is not included in settings.ini
    • Add aiohttp which is imported in this file
    • Add nbformat which is imported in _components/helpers.py
    • Add nbconvert which is imported in _components/helpers.py

0.1.1

Bugs Squashed

  • JDK install fails on Python 3.8 (#106)

0.1.0

Initial release

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/CONTRIBUTING/index.html b/docs/0.7.1/CONTRIBUTING/index.html new file mode 100644 index 0000000..9ea2f67 --- /dev/null +++ b/docs/0.7.1/CONTRIBUTING/index.html @@ -0,0 +1,36 @@ + + + + + +Contributing to fastkafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Contributing to fastkafka

First off, thanks for taking the time to contribute! ❤️

All types of contributions are encouraged and valued. See the Table of Contents for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉

And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:

  • Star the project
  • Tweet about it
  • Refer this project in your project's readme
  • Mention the project at local meetups and tell your friends/colleagues

Table of Contents

I Have a Question

If you want to ask a question, we assume that you have read the available Documentation.

Before you ask a question, it is best to search for existing Issues that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue.

If you then still feel the need to ask a question and need clarification, we recommend the following:

  • Contact us on Discord
  • Open an Issue
    • Provide as much context as you can about what you're running into

We will then take care of the issue as soon as possible.

I Want To Contribute

When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.

Reporting Bugs

Before Submitting a Bug Report

A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.

  • Make sure that you are using the latest version.
  • Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the documentation. If you are looking for support, you might want to check this section).
  • To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the bug tracker.
  • Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.
  • Collect information about the bug:
    • Stack trace (Traceback)
    • OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
    • Python version
    • Possibly your input and the output
    • Can you reliably reproduce the issue? And can you also reproduce it with older versions?

How Do I Submit a Good Bug Report?

We use GitHub issues to track bugs and errors. If you run into an issue with the project:

  • Open an Issue. (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)
  • Explain the behavior you would expect and the actual behavior.
  • Please provide as much context as possible and describe the reproduction steps that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
  • Provide the information you collected in the previous section.

Once it's filed:

  • The project team will label the issue accordingly.
  • A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as needs-repro. Bugs with the needs-repro tag will not be addressed until they are reproduced.
  • If the team is able to reproduce the issue, it will be marked needs-fix, as well as possibly other tags (such as critical), and the issue will be left to be implemented.

Suggesting Enhancements

This section guides you through submitting an enhancement suggestion for fastkafka, including completely new features and minor improvements to existing functionality. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.

Before Submitting an Enhancement

  • Make sure that you are using the latest version.
  • Read the documentation carefully and find out if the functionality is already covered, maybe by an individual configuration.
  • Perform a search to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
  • Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library.
  • If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on Discord

How Do I Submit a Good Enhancement Suggestion?

Enhancement suggestions are tracked as GitHub issues.

  • Use a clear and descriptive title for the issue to identify the suggestion.
  • Provide a step-by-step description of the suggested enhancement in as many details as possible.
  • Describe the current behavior and explain which behavior you expected to see instead and why. At this point you can also tell which alternatives do not work for you.
  • Explain why this enhancement would be useful to most fastkafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.

Your First Code Contribution

A great way to start contributing to FastKafka would be by solving an issue tagged with "good first issue". To find a list of issues that are tagged as "good first issue" and are suitable for newcomers, please visit the following link: Good first issues

These issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in Way of working and Before a PR, and help us make FastKafka even better!

If you have any questions or need further assistance, feel free to reach out to us. Happy coding!

Development

Prepare the dev environment

To start contributing to fastkafka, you first have to prepare the development environment.

Clone the fastkafka repository

To clone the repository, run the following command in the CLI:

git clone https://github.com/airtai/fastkafka.git

Optional: create a virtual python environment

To prevent library version clashes with your other projects, it is recommended that you create a virtual python environment for your fastkafka project by running:

python3 -m venv fastkafka-env

And to activate your virtual environment run:

source fastkafka-env/bin/activate

To learn more about virtual environments, please have a look at official python documentation

Install fastkafka

To install fastkafka, navigate to the root directory of the cloned fastkafka project and run:

pip install -e ".[dev]"

Install JRE and Kafka toolkit

To be able to run tests and use all the functionalities of fastkafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:

  1. Use our fastkafka testing install-deps CLI command which will install JRE and Kafka toolkit for you in your .local folder +OR
  2. Install JRE and Kafka manually. +To do this, please refer to JDK and JRE installation guide and Apache Kafka quickstart

Install npm

To be able to run tests you must have npm installed, because of documentation generation. To do this, you have two options:

  1. Use our fastkafka docs install_deps CLI command which will install npm for you in your .local folder +OR
  2. Install npm manually. +To do this, please refer to NPM installation guide

Install docusaurus

To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of fastkafka project.

Check if everything works

After installing fastkafka and all the necessary dependencies, run nbdev_test in the root of fastkafka project. This will take a couple of minutes as it will run all the tests on fastkafka project. If everything is set up correctly, you will get a "Success." message in your terminal, otherwise please refer to previous steps.

Way of working

The development of fastkafka is done in Jupyter notebooks. Inside the nbs directory you will find all the source code of fastkafka, this is where you will implement your changes.

The testing, cleanup and exporting of the code is being handled by nbdev, please, before starting the work on fastkafka, get familiar with it by reading nbdev documentation.

The general philosophy you should follow when writing code for fastkafka is:

  • Function should be an atomic functionality, short and concise
    • Good rule of thumb: your function should be 5-10 lines long usually
  • If there are more than 2 params, enforce keywording using *
    • E.g.: def function(param1, *, param2, param3): ...
  • Define typing of arguments and return value
    • If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected
  • After the function cell, write test cells using the assert keyword
    • Whenever you implement something you should test that functionality immediately in the cells below
  • Add Google style python docstrings when function is implemented and tested

Before a PR

After you have implemented your changes you will want to open a pull request to merge those changes into our main branch. To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of fastkafka project):

  1. Format your notebooks: nbqa black nbs
  2. Close, shutdown, and clean the metadata from your notebooks: nbdev_clean
  3. Export your code: nbdev_export
  4. Run the tests: nbdev_test
  5. Test code typing: mypy fastkafka
  6. Test code safety with bandit: bandit -r fastkafka
  7. Test code safety with semgrep: semgrep --config auto -r fastkafka

When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everything is in order, we will approve your merge.

Attribution

This guide is based on the contributing-gen. Make your own!

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/LICENSE/index.html b/docs/0.7.1/LICENSE/index.html new file mode 100644 index 0000000..41360a5 --- /dev/null +++ b/docs/0.7.1/LICENSE/index.html @@ -0,0 +1,168 @@ + + + + + +LICENSE | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

LICENSE

Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

  1. Definitions.

    "License" shall mean the terms and conditions for use, reproduction, +and distribution as defined by Sections 1 through 9 of this document.

    "Licensor" shall mean the copyright owner or entity authorized by +the copyright owner that is granting the License.

    "Legal Entity" shall mean the union of the acting entity and all +other entities that control, are controlled by, or are under common +control with that entity. For the purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity.

    "You" (or "Your") shall mean an individual or Legal Entity +exercising permissions granted by this License.

    "Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation +source, and configuration files.

    "Object" form shall mean any form resulting from mechanical +transformation or translation of a Source form, including but +not limited to compiled object code, generated documentation, +and conversions to other media types.

    "Work" shall mean the work of authorship, whether in Source or +Object form, made available under the License, as indicated by a +copyright notice that is included in or attached to the work +(an example is provided in the Appendix below).

    "Derivative Works" shall mean any work, whether in Source or Object +form, that is based on (or derived from) the Work and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. For the purposes +of this License, Derivative Works shall not include works that remain +separable from, or merely link (or bind by name) to the interfaces of, +the Work and Derivative Works thereof.

    "Contribution" shall mean any work of authorship, including +the original version of the Work and any modifications or additions +to that Work or Derivative Works thereof, that is intentionally +submitted to Licensor for inclusion in the Work by the copyright owner +or by an individual or Legal Entity authorized to submit on behalf of +the copyright owner. For the purposes of this definition, "submitted" +means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, +and issue tracking systems that are managed by, or on behalf of, the +Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise +designated in writing by the copyright owner as "Not a Contribution."

    "Contributor" shall mean Licensor and any individual or Legal Entity +on behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work.

  2. Grant of Copyright License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the +Work and such Derivative Works in Source or Object form.

  3. Grant of Patent License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, +use, offer to sell, sell, import, and otherwise transfer the Work, +where such license applies only to those patent claims licensable +by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) +with the Work to which such Contribution(s) was submitted. If You +institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work +or a Contribution incorporated within the Work constitutes direct +or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate +as of the date such litigation is filed.

  4. Redistribution. You may reproduce and distribute copies of the +Work or Derivative Works thereof in any medium, with or without +modifications, and in Source or Object form, provided that You +meet the following conditions:

    (a) You must give any other recipients of the Work or +Derivative Works a copy of this License; and

    (b) You must cause any modified files to carry prominent notices +stating that You changed the files; and

    (c) You must retain, in the Source form of any Derivative Works +that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, +excluding those notices that do not pertain to any part of +the Derivative Works; and

    (d) If the Work includes a "NOTICE" text file as part of its +distribution, then any Derivative Works that You distribute must +include a readable copy of the attribution notices contained +within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one +of the following places: within a NOTICE text file distributed +as part of the Derivative Works; within the Source form or +documentation, if provided along with the Derivative Works; or, +within a display generated by the Derivative Works, if and +wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and +do not modify the License. You may add Your own attribution +notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided +that such additional attribution notices cannot be construed +as modifying the License.

    You may add Your own copyright statement to Your modifications and +may provide additional or different license terms and conditions +for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, +reproduction, and distribution of the Work otherwise complies with +the conditions stated in this License.

  5. Submission of Contributions. Unless You explicitly state otherwise, +any Contribution intentionally submitted for inclusion in the Work +by You to the Licensor shall be under the terms and conditions of +this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify +the terms of any separate license agreement you may have executed +with Licensor regarding such Contributions.

  6. Trademarks. This License does not grant permission to use the trade +names, trademarks, service marks, or product names of the Licensor, +except as required for reasonable and customary use in describing the +origin of the Work and reproducing the content of the NOTICE file.

  7. Disclaimer of Warranty. Unless required by applicable law or +agreed to in writing, Licensor provides the Work (and each +Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions +of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any +risks associated with Your exercise of permissions under this License.

  8. Limitation of Liability. In no event and under no legal theory, +whether in tort (including negligence), contract, or otherwise, +unless required by applicable law (such as deliberate and grossly +negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a +result of this License or out of the use or inability to use the +Work (including but not limited to damages for loss of goodwill, +work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses), even if such Contributor +has been advised of the possibility of such damages.

  9. Accepting Warranty or Additional Liability. While redistributing +the Work or Derivative Works thereof, You may choose to offer, +and charge a fee for, acceptance of support, warranty, indemnity, +or other liability obligations and/or rights consistent with this +License. However, in accepting such obligations, You may act only +on Your own behalf and on Your sole responsibility, not on behalf +of any other Contributor, and only if You agree to indemnify, +defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason +of your accepting any such warranty or additional liability.

    END OF TERMS AND CONDITIONS

    APPENDIX: How to apply the Apache License to your work.

    To apply the Apache License to your work, attach the following +boilerplate notice, with the fields enclosed by brackets "[]" +replaced with your own identifying information. (Don't include +the brackets!) The text should be enclosed in the appropriate +comment syntax for the file format. We also recommend that a +file or class name and description of purpose be included on the +same "printed page" as the copyright notice for easier +identification within third-party archives.

    Copyright [yyyy] [name of copyright owner]

    Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/EventMetadata/index.html b/docs/0.7.1/api/fastkafka/EventMetadata/index.html new file mode 100644 index 0000000..fb10c44 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/EventMetadata/index.html @@ -0,0 +1,32 @@ + + + + + +EventMetadata | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

EventMetadata

fastkafka.EventMetadata

A class for encapsulating Kafka record metadata.

Parameters:

  • topic: The topic this record is received from
  • partition: The partition from which this record is received
  • offset: The position of this record in the corresponding Kafka partition
  • timestamp: The timestamp of this record
  • timestamp_type: The timestamp type of this record
  • key: The key (or None if no key is specified)
  • value: The value
  • serialized_key_size: The size of the serialized, uncompressed key in bytes
  • serialized_value_size: The size of the serialized, uncompressed value in bytes
  • headers: The headers

create_event_metadata

def create_event_metadata(record: aiokafka.structs.ConsumerRecord) -> EventMetadata

Creates an instance of EventMetadata from a ConsumerRecord.

Parameters:

  • record: The Kafka ConsumerRecord.

Returns:

  • The created EventMetadata instance.
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/KafkaEvent/index.html b/docs/0.7.1/api/fastkafka/KafkaEvent/index.html new file mode 100644 index 0000000..93bfd47 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/KafkaEvent/index.html @@ -0,0 +1,32 @@ + + + + + +KafkaEvent | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

KafkaEvent

fastkafka.KafkaEvent

A generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel

Parameters:

  • message: The message contained in the Kafka event, can be of type pydantic.BaseModel.
  • key: The optional key used to identify the Kafka event.
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/encoder/AvroBase/index.html b/docs/0.7.1/api/fastkafka/encoder/AvroBase/index.html new file mode 100644 index 0000000..9d9d49d --- /dev/null +++ b/docs/0.7.1/api/fastkafka/encoder/AvroBase/index.html @@ -0,0 +1,32 @@ + + + + + +AvroBase | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

AvroBase

fastkafka.encoder.AvroBase

This is base pydantic class that will add some methods

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/encoder/avro_decoder/index.html b/docs/0.7.1/api/fastkafka/encoder/avro_decoder/index.html new file mode 100644 index 0000000..55d68ea --- /dev/null +++ b/docs/0.7.1/api/fastkafka/encoder/avro_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_decoder | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

avro_decoder

fastkafka.encoder.avro_decoder

avro_decoder

def avro_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any

Decoder to decode avro encoded messages to pydantic model instance

Parameters:

  • raw_msg: Avro encoded bytes message received from Kafka topic
  • cls: Pydantic class; This pydantic class will be used to construct instance of same class

Returns:

  • An instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/encoder/avro_encoder/index.html b/docs/0.7.1/api/fastkafka/encoder/avro_encoder/index.html new file mode 100644 index 0000000..1743959 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/encoder/avro_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_encoder | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

avro_encoder

fastkafka.encoder.avro_encoder

avro_encoder

def avro_encoder(msg: pydantic.main.BaseModel) -> bytes

Encoder to encode pydantic instances to avro message

Parameters:

  • msg: An instance of pydantic basemodel

Returns:

  • A bytes message which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/index.html b/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/index.html new file mode 100644 index 0000000..8fa0dbc --- /dev/null +++ b/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/index.html @@ -0,0 +1,32 @@ + + + + + +avsc_to_pydantic | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

avsc_to_pydantic

fastkafka.encoder.avsc_to_pydantic

avsc_to_pydantic

def avsc_to_pydantic(schema: Dict[str, Any]) -> ModelMetaclass

Generate pydantic model from given Avro Schema

Parameters:

  • schema: Avro schema in dictionary format

Returns:

  • Pydantic model class built from given avro schema
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/encoder/json_decoder/index.html b/docs/0.7.1/api/fastkafka/encoder/json_decoder/index.html new file mode 100644 index 0000000..2d99b70 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/encoder/json_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_decoder | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

json_decoder

fastkafka.encoder.json_decoder

json_decoder

def json_decoder(raw_msg: bytes, cls: pydantic.main.ModelMetaclass) -> Any

Decoder to decode json string in bytes to pydantic model instance

Parameters:

  • raw_msg: Bytes message received from Kafka topic
  • cls: Pydantic class; This pydantic class will be used to construct instance of same class

Returns:

  • An instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/encoder/json_encoder/index.html b/docs/0.7.1/api/fastkafka/encoder/json_encoder/index.html new file mode 100644 index 0000000..c3e4705 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/encoder/json_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_encoder | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

json_encoder

fastkafka.encoder.json_encoder

json_encoder

def json_encoder(msg: pydantic.main.BaseModel) -> bytes

Encoder to encode pydantic instances to json string

Parameters:

  • msg: An instance of pydantic basemodel

Returns:

  • Json string in bytes which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor/index.html b/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor/index.html new file mode 100644 index 0000000..2705ea5 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor/index.html @@ -0,0 +1,35 @@ + + + + + +DynamicTaskExecutor | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

DynamicTaskExecutor

fastkafka.executors.DynamicTaskExecutor

A class that implements a dynamic task executor for processing consumer records.

The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality +for running tasks in parallel using asyncio.Task.

__init__

def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000, size: int = 100000) -> None

Create an instance of DynamicTaskExecutor

Parameters:

  • throw_exceptions: Flag indicating whether exceptions should be thrown or logged. +Defaults to False.
  • max_buffer_size: Maximum buffer size for the memory object stream. +Defaults to 100_000.
  • size: Size of the task pool. Defaults to 100_000.

run

def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None

Runs the dynamic task executor.

Parameters:

  • is_shutting_down_f: Function to check if the executor is shutting down.
  • generator: Generator function for retrieving consumer records.
  • processor: Processor function for processing consumer records.

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/executors/SequentialExecutor/index.html b/docs/0.7.1/api/fastkafka/executors/SequentialExecutor/index.html new file mode 100644 index 0000000..14e52a6 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/executors/SequentialExecutor/index.html @@ -0,0 +1,35 @@ + + + + + +SequentialExecutor | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

SequentialExecutor

fastkafka.executors.SequentialExecutor

A class that implements a sequential executor for processing consumer records.

The SequentialExecutor class extends the StreamExecutor class and provides functionality +for running processing tasks in sequence by awaiting their coroutines.

__init__

def __init__(self, throw_exceptions: bool = False, max_buffer_size: int = 100000) -> None

Create an instance of SequentialExecutor

Parameters:

  • throw_exceptions: Flag indicating whether exceptions should be thrown or logged. +Defaults to False.
  • max_buffer_size: Maximum buffer size for the memory object stream. +Defaults to 100_000.

run

def run(self, is_shutting_down_f: Callable[[], bool], generator: Callable[[], Awaitable[aiokafka.structs.ConsumerRecord]], processor: Callable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]) -> None

Runs the sequential executor.

Parameters:

  • is_shutting_down_f: Function to check if the executor is shutting down.
  • generator: Generator function for retrieving consumer records.
  • processor: Processor function for processing consumer records.

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/index.html b/docs/0.7.1/api/fastkafka/index.html new file mode 100644 index 0000000..12f7e53 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/index.html @@ -0,0 +1,497 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

FastKafka

fastkafka.FastKafka

__init__

def __init__(self, title: Optional[str] = None, description: Optional[str] = None, version: Optional[str] = None, contact: Optional[Dict[str, str]] = None, kafka_brokers: Optional[Dict[str, Any]] = None, root_path: Optional[pathlib.Path, str] = None, lifespan: Optional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fcedfc68f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fcedec6c850>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None, group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, exclude_internal_topics=True, isolation_level='read_uncommitted') -> None

Creates FastKafka application

Parameters:

  • title: optional title for the documentation. If None, +the title will be set to empty string
  • description: optional description for the documentation. If +None, the description will be set to empty string
  • version: optional version for the documentation. If None, +the version will be set to empty string
  • contact: optional contact for the documentation. If None, the +contact will be set to placeholder values: +name='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'
  • kafka_brokers: dictionary describing kafka brokers used for setting +the bootstrap server when running the application and for +generating documentation. Defaults to +{ +"localhost": { +"url": "localhost", +"description": "local kafka broker", +"port": "9092", +} +}
  • root_path: path to where documentation will be created
  • lifespan: asynccontextmanager that is used for setting lifespan hooks. +aenter is called before app start and aexit after app stop. +The lifespan is called when the application is started as async context +manager, e.g.:async with kafka_app...
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT, +SASL_SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence acks to set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalancing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

benchmark

def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]

Decorator to benchmark produces/consumes functions

Parameters:

  • interval: Period to use to calculate throughput. If value is of type int, +then it will be used as seconds. If value is of type timedelta, +then it will be used as it is. default: 1 - one second
  • sliding_window_size: The size of the sliding window to use to calculate +average throughput. default: None - By default average throughput is +not calculated

consumes

def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.1', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], 
typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the consumer will subscribe to and execute the +decorated function when it receives a message from the topic, +default: None. If the topic is not specified, topic name will be +inferred from the decorated function name by stripping the defined prefix
  • decoder: Decoder to use to decode messages consumed from the topic, +default: json - By default, it uses json decoder to decode +bytes to json string and then it creates instance of pydantic +BaseModel. It also accepts custom decoder function.
  • executor: Type of executor to choose for consuming tasks. Available options +are "SequentialExecutor" and "DynamicTaskExecutor". The default option is +"SequentialExecutor" which will execute the consuming tasks sequentially. +If the consuming tasks have high latency it is recommended to use +"DynamicTaskExecutor" which will wrap the consuming functions into tasks +and run them on an asyncio loop in the background. This comes with a cost of +increased overhead so use it only in cases when your consume functions have +high latency such as database queries or some other type of networking.
  • prefix: Prefix stripped from the decorated function to define a topic name +if the topic argument is not passed, default: "on_". If the decorated +function name is not prefixed with the defined prefix and topic argument +is not passed, then this method will throw ValueError
  • brokers: Optional argument specifying multiple broker clusters for consuming +messages from different Kafka clusters in FastKafka.
  • description: Optional description of the consuming function async docs. +If not provided, consuming function doc attr will be used.
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • bootstrap_servers: a host[:port] string (or list of +host[:port] strings) that the consumer should contact to bootstrap +initial cluster metadata.

This does not have to be the full node list. +It just needs to have at least one broker that will respond to a +Metadata API Request. Default port is 9092. If no servers are +specified, will default to localhost:9092.

  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. Also +submitted to :class:~.consumer.group_coordinator.GroupCoordinator +for logging with respect to consumer group administration. Default: +aiokafka-{version}
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • request_timeout_ms: Client request timeout in milliseconds. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalancing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • api_version: specify which kafka API version to use. +:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT, +SASL_SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more information see +:ref:ssl_auth. Default: None.
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying None will +disable idle checks. Default: 540000 (9 minutes).
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: +PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: None
  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

Returns:

  • : A function returning the same function

create_docs

def create_docs(self: fastkafka.FastKafka) -> None

Create the asyncapi documentation based on the configured consumers and producers.

This function exports the asyncapi specification based on the configured consumers +and producers in the FastKafka instance. It generates the asyncapi documentation by +extracting the topics and callbacks from the consumers and producers.

Note: +The asyncapi documentation is saved to the location specified by the _asyncapi_path +attribute of the FastKafka instance.

Returns:

  • None

create_mocks

def create_mocks(self: fastkafka.FastKafka) -> None

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

fastapi_lifespan

def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]

Method for managing the lifespan of a FastAPI application with a specific Kafka broker.

Parameters:

  • kafka_broker_name: The name of the Kafka broker to start FastKafka

Returns:

  • Lifespan function to use for initializing FastAPI

get_topics

def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]

Get all topics for both producing and consuming.

Returns:

  • A set of topics for both producing and consuming.

produces

def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fcedfc68f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fcedec6c850>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the producer will send returned values from +the decorated function to, default: None- If the topic is not +specified, topic name will be inferred from the decorated function +name by stripping the defined prefix.
  • encoder: Encoder to use to encode messages before sending it to topic, +default: json - By default, it uses json encoder to convert +pydantic basemodel to json string and then encodes the string to bytes +using 'utf-8' encoding. It also accepts custom encoder function.
  • prefix: Prefix stripped from the decorated function to define a topic +name if the topic argument is not passed, default: "to_". If the +decorated function name is not prefixed with the defined prefix +and topic argument is not passed, then this method will throw ValueError
  • brokers: Optional argument specifying multiple broker clusters for consuming +messages from different Kafka clusters in FastKafka.
  • description: Optional description of the producing function async docs. +If not provided, producing function doc attr will be used.
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT, +SASL_SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence requires acks to be set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (: class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None

Returns:

  • : A function returning the same function

Exceptions:

  • ValueError: when needed

run_in_background

def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

  • A decorator function that takes a background task as an input and stores it to be run in the background.

set_kafka_broker

def set_kafka_broker(self, kafka_broker_name: str) -> None

Sets the Kafka broker to start FastKafka with

Parameters:

  • kafka_broker_name: The name of the Kafka broker to start FastKafka

Returns:

  • None

Exceptions:

  • ValueError: If the provided kafka_broker_name is not found in dictionary of kafka_brokers
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker/index.html b/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker/index.html new file mode 100644 index 0000000..9f6bdb6 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker/index.html @@ -0,0 +1,32 @@ + + + + + +ApacheKafkaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

ApacheKafkaBroker

fastkafka.testing.ApacheKafkaBroker

ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.

__init__

def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None

Initialises the ApacheKafkaBroker object

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

get_service_config_string

def get_service_config_string(self: fastkafka.testing.ApacheKafkaBroker, service: str, data_dir: pathlib.Path) -> str

Gets the configuration string for a service.

Parameters:

  • service: Name of the service ("kafka" or "zookeeper").
  • data_dir: Path to the directory where the service will save data.

Returns:

  • The service configuration string.

start

def start(self: fastkafka.testing.ApacheKafkaBroker) -> str

Starts a local Kafka broker and ZooKeeper instance synchronously.

Returns:

  • The Kafka broker bootstrap server address in string format: host:port.

stop

def stop(self: fastkafka.testing.ApacheKafkaBroker) -> None

Stops a local kafka broker and zookeeper instance synchronously

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker/index.html b/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker/index.html new file mode 100644 index 0000000..e8c0860 --- /dev/null +++ b/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker/index.html @@ -0,0 +1,32 @@ + + + + + +LocalRedpandaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

LocalRedpandaBroker

fastkafka.testing.LocalRedpandaBroker

LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.

__init__

def __init__(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug', **kwargs: Dict[str, Any]) -> None

Initialises the LocalRedpandaBroker object

Parameters:

  • listener_port: Port on which the clients (producers and consumers) can connect
  • tag: Tag of Redpanda image to use to start container
  • seastar_core: Core(s) to use by Seastar (the framework Redpanda uses under the hood)
  • memory: The amount of memory to make available to Redpanda
  • mode: Mode to use to load configuration properties in container
  • default_log_level: Log levels to use for Redpanda

get_service_config_string

def get_service_config_string(self, service: str, data_dir: pathlib.Path) -> str

Generates a configuration for a service

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • service: "redpanda", defines which service to get config string for

start

def start(self: fastkafka.testing.LocalRedpandaBroker) -> str

Starts a local redpanda broker instance synchronously

Returns:

  • Redpanda broker bootstrap server address in string format: host:port

stop

def stop(self: fastkafka.testing.LocalRedpandaBroker) -> None

Stops a local redpanda broker instance synchronously

Returns:

  • None
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/api/fastkafka/testing/Tester/index.html b/docs/0.7.1/api/fastkafka/testing/Tester/index.html new file mode 100644 index 0000000..91a309a --- /dev/null +++ b/docs/0.7.1/api/fastkafka/testing/Tester/index.html @@ -0,0 +1,290 @@ + + + + + +Tester | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Tester

fastkafka.testing.Tester

__init__

def __init__(self, app: Union[fastkafka.FastKafka, List[fastkafka.FastKafka]], broker: Optional[fastkafka.testing.ApacheKafkaBroker, fastkafka.testing.LocalRedpandaBroker, fastkafka._testing.in_memory_broker.InMemoryBroker] = None, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> None

Mirror-like object for testing a FastKafka application

Can be used as context manager

Parameters:

  • app: The FastKafka application to be tested.
  • broker: An optional broker to start and to use for testing.
  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

benchmark

def benchmark(self: fastkafka.FastKafka, interval: Union[int, datetime.timedelta] = 1, sliding_window_size: Optional[int] = None) -> typing.Callable[[typing.Callable[[~I], typing.Optional[~O]]], typing.Callable[[~I], typing.Optional[~O]]]

Decorator to benchmark produces/consumes functions

Parameters:

  • interval: Period to use to calculate throughput. If value is of type int, +then it will be used as seconds. If value is of type timedelta, +then it will be used as it is. default: 1 - one second
  • sliding_window_size: The size of the sliding window to use to calculate +average throughput. default: None - By default average throughput is +not calculated

consumes

def consumes(self: fastkafka.FastKafka, topic: Optional[str] = None, decoder: Union[str, Callable[[bytes, pydantic.main.ModelMetaclass], Any]] = 'json', executor: Optional[str, fastkafka._components.task_streaming.StreamExecutor] = None, brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, prefix: str = 'on_', description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id='aiokafka-0.8.1', group_id=None, key_deserializer=None, value_deserializer=None, fetch_max_wait_ms=500, fetch_max_bytes=52428800, fetch_min_bytes=1, max_partition_fetch_bytes=1048576, request_timeout_ms=40000, retry_backoff_ms=100, auto_offset_reset='latest', enable_auto_commit=True, auto_commit_interval_ms=5000, check_crcs=True, metadata_max_age_ms=300000, partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,), max_poll_interval_ms=300000, rebalance_timeout_ms=None, session_timeout_ms=10000, heartbeat_interval_ms=3000, consumer_timeout_ms=200, max_poll_records=None, ssl_context=None, security_protocol='PLAINTEXT', api_version='auto', exclude_internal_topics=True, connections_max_idle_ms=540000, isolation_level='read_uncommitted', sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], 
typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]], typing.Union[typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], typing.Awaitable[NoneType]], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel]], NoneType], typing.Callable[[typing.Union[typing.List[pydantic.main.BaseModel], pydantic.main.BaseModel], typing.Union[typing.List[fastkafka.EventMetadata], fastkafka.EventMetadata]], NoneType]]]

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the consumer will subscribe to and execute the +decorated function when it receives a message from the topic, +default: None. If the topic is not specified, topic name will be +inferred from the decorated function name by stripping the defined prefix
  • decoder: Decoder to use to decode messages consumed from the topic, +default: json - By default, it uses json decoder to decode +bytes to json string and then it creates instance of pydantic +BaseModel. It also accepts custom decoder function.
  • executor: Type of executor to choose for consuming tasks. Available options +are "SequentialExecutor" and "DynamicTaskExecutor". The default option is +"SequentialExecutor" which will execute the consuming tasks sequentially. +If the consuming tasks have high latency it is recommended to use +"DynamicTaskExecutor" which will wrap the consuming functions into tasks +and run them on an asyncio loop in the background. This comes with a cost of +increased overhead so use it only in cases when your consume functions have +high latency such as database queries or some other type of networking.
  • prefix: Prefix stripped from the decorated function to define a topic name +if the topic argument is not passed, default: "on_". If the decorated +function name is not prefixed with the defined prefix and topic argument +is not passed, then this method will throw ValueError
  • brokers: Optional argument specifying multiple broker clusters for consuming +messages from different Kafka clusters in FastKafka.
  • description: Optional description of the consuming function async docs. +If not provided, consuming function doc attr will be used.
  • *topics: optional list of topics to subscribe to. If not set, +call :meth:.subscribe or :meth:.assign before consuming records. +Passing topics directly is same as calling :meth:.subscribe API.
  • bootstrap_servers: a host[:port] string (or list of +host[:port] strings) that the consumer should contact to bootstrap +initial cluster metadata.

This does not have to be the full node list. +It just needs to have at least one broker that will respond to a +Metadata API Request. Default port is 9092. If no servers are +specified, will default to localhost:9092.

  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. Also +submitted to :class:~.consumer.group_coordinator.GroupCoordinator +for logging with respect to consumer group administration. Default: +aiokafka-{version}
  • group_id: name of the consumer group to join for dynamic +partition assignment (if enabled), and to use for fetching and +committing offsets. If None, auto-partition assignment (via +group coordinator) and offset commits are disabled. +Default: None
  • key_deserializer: Any callable that takes a +raw message key and returns a deserialized key.
  • value_deserializer: Any callable that takes a +raw message value and returns a deserialized value.
  • fetch_min_bytes: Minimum amount of data the server should +return for a fetch request, otherwise wait up to +fetch_max_wait_ms for more data to accumulate. Default: 1.
  • fetch_max_bytes: The maximum amount of data the server should +return for a fetch request. This is not an absolute maximum, if +the first message in the first non-empty partition of the fetch +is larger than this value, the message will still be returned +to ensure that the consumer can make progress. NOTE: consumer +performs fetches to multiple brokers in parallel so memory +usage will depend on the number of brokers containing +partitions for the topic. +Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).
  • fetch_max_wait_ms: The maximum amount of time in milliseconds +the server will block before answering the fetch request if +there isn't sufficient data to immediately satisfy the +requirement given by fetch_min_bytes. Default: 500.
  • max_partition_fetch_bytes: The maximum amount of data +per-partition the server will return. The maximum total memory +used for a request = #partitions * max_partition_fetch_bytes. +This size must be at least as large as the maximum message size +the server allows or else it is possible for the producer to +send messages larger than the consumer can fetch. If that +happens, the consumer can get stuck trying to fetch a large +message on a certain partition. Default: 1048576.
  • max_poll_records: The maximum number of records returned in a +single call to :meth:.getmany. Defaults None, no limit.
  • request_timeout_ms: Client request timeout in milliseconds. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • auto_offset_reset: A policy for resetting offsets on +:exc:.OffsetOutOfRangeError errors: earliest will move to the oldest +available message, latest will move to the most recent, and +none will raise an exception so you can handle this case. +Default: latest.
  • enable_auto_commit: If true the consumer's offset will be +periodically committed in the background. Default: True.
  • auto_commit_interval_ms: milliseconds between automatic +offset commits, if enable_auto_commit is True. Default: 5000.
  • check_crcs: Automatically check the CRC32 of the records +consumed. This ensures no on-the-wire or on-disk corruption to +the messages occurred. This check adds some overhead, so it may +be disabled in cases seeking extreme performance. Default: True
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • partition_assignment_strategy: List of objects to use to +distribute partition ownership amongst consumer instances when +group management is used. This preference is implicit in the order +of the strategies in the list. When assignment strategy changes: +to support a change to the assignment strategy, new versions must +enable support both for the old assignment strategy and the new +one. The coordinator will choose the old assignment strategy until +all members have been updated. Then it will choose the new +strategy. Default: [:class:.RoundRobinPartitionAssignor]
  • max_poll_interval_ms: Maximum allowed time between calls to +consume messages (e.g., :meth:.getmany). If this interval +is exceeded the consumer is considered failed and the group will +rebalance in order to reassign the partitions to another consumer +group member. If API methods block waiting for messages, that time +does not count against this timeout. See KIP-62_ for more +information. Default 300000
  • rebalance_timeout_ms: The maximum time server will wait for this +consumer to rejoin the group in a case of rebalance. In Java client +this behaviour is bound to max.poll.interval.ms configuration, +but as aiokafka will rejoin the group in the background, we +decouple this setting to allow finer tuning by users that use +:class:.ConsumerRebalanceListener to delay rebalacing. Defaults +to session_timeout_ms
  • session_timeout_ms: Client group session and failure detection +timeout. The consumer sends periodic heartbeats +(heartbeat.interval.ms) to indicate its liveness to the broker. +If no heartbeats are received by the broker for a group member within +the session timeout, the broker will remove the consumer from the +group and trigger a rebalance. The allowed range is configured with +the broker configuration properties +group.min.session.timeout.ms and group.max.session.timeout.ms. +Default: 10000
  • heartbeat_interval_ms: The expected time in milliseconds +between heartbeats to the consumer coordinator when using +Kafka's group management feature. Heartbeats are used to ensure +that the consumer's session stays active and to facilitate +rebalancing when new consumers join or leave the group. The +value must be set lower than session_timeout_ms, but typically +should be set no higher than 1/3 of that value. It can be +adjusted even lower to control the expected time for normal +rebalances. Default: 3000
  • consumer_timeout_ms: maximum wait timeout for background fetching +routine. Mostly defines how fast the system will see rebalance and +request new data for new partitions. Default: 200
  • api_version: specify which kafka API version to use. +:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT, +SASL_SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more information see +:ref:ssl_auth. Default: None.
  • exclude_internal_topics: Whether records from internal topics +(such as offsets) should be exposed to the consumer. If set to True +the only way to receive records from an internal topic is +subscribing to it. Requires 0.10+ Default: True
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying None will +disable idle checks. Default: 540000 (9 minutes).
  • isolation_level: Controls how to read messages written +transactionally.

If set to read_committed, :meth:.getmany will only return +transactional messages which have been committed. +If set to read_uncommitted (the default), :meth:.getmany will +return all messages, even transactional messages which have been +aborted.

Non-transactional messages will be returned unconditionally in +either mode.

Messages will always be returned in offset order. Hence, in +read_committed mode, :meth:.getmany will only return +messages up to the last stable offset (LSO), which is the one less +than the offset of the first open transaction. In particular any +messages appearing after messages belonging to ongoing transactions +will be withheld until the relevant transaction has been completed. +As a result, read_committed consumers will not be able to read up +to the high watermark when there are in flight transactions. +Further, when in read_committed the seek_to_end method will +return the LSO. See method docs below. Default: read_uncommitted

  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: +PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: None
  • sasl_oauth_token_provider: OAuthBearer token provider instance. (See :mod:kafka.oauth.abstract). +Default: None

Returns:

  • : A function returning the same function

create_docs

def create_docs(self: fastkafka.FastKafka) -> None

Create the asyncapi documentation based on the configured consumers and producers.

This function exports the asyncapi specification based on the configured consumers +and producers in the FastKafka instance. It generates the asyncapi documentation by +extracting the topics and callbacks from the consumers and producers.

Note: +The asyncapi documentation is saved to the location specified by the _asyncapi_path +attribute of the FastKafka instance.

Returns:

  • None

create_mocks

def create_mocks(self: fastkafka.FastKafka) -> None

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

fastapi_lifespan

def fastapi_lifespan(self: fastkafka.FastKafka, kafka_broker_name: str) -> typing.Callable[[ForwardRef('FastAPI')], typing.AsyncIterator[NoneType]]

Method for managing the lifespan of a FastAPI application with a specific Kafka broker.

Parameters:

  • kafka_broker_name: The name of the Kafka broker to start FastKafka

Returns:

  • Lifespan function to use for initializing FastAPI

get_topics

def get_topics(self: fastkafka.FastKafka) -> typing.Iterable[str]

Get all topics for both producing and consuming.

Returns:

  • A set of topics for both producing and consuming.

produces

def produces(self: fastkafka.FastKafka, topic: Optional[str] = None, encoder: Union[str, Callable[[pydantic.main.BaseModel], bytes]] = 'json', prefix: str = 'to_', brokers: Optional[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers] = None, description: Optional[str] = None, loop=None, bootstrap_servers='localhost', client_id=None, metadata_max_age_ms=300000, request_timeout_ms=40000, api_version='auto', acks=<object object at 0x7fcedfc68f60>, key_serializer=None, value_serializer=None, compression_type=None, max_batch_size=16384, partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7fcedec6c850>, max_request_size=1048576, linger_ms=0, send_backoff_ms=100, retry_backoff_ms=100, security_protocol='PLAINTEXT', ssl_context=None, connections_max_idle_ms=540000, enable_idempotence=False, transactional_id=None, transaction_timeout_ms=60000, sasl_mechanism='PLAIN', sasl_plain_password=None, sasl_plain_username=None, sasl_kerberos_service_name='kafka', sasl_kerberos_domain_name=None, sasl_oauth_token_provider=None) -> typing.Callable[[typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]], typing.Union[typing.Callable[..., typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]], typing.Callable[..., typing.Awaitable[typing.Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], typing.List[pydantic.main.BaseModel], fastkafka.KafkaEvent[typing.List[pydantic.main.BaseModel]]]]]]]

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

  • topic: Kafka topic that the producer will send returned values from +the decorated function to, default: None- If the topic is not +specified, topic name will be inferred from the decorated function +name by stripping the defined prefix.
  • encoder: Encoder to use to encode messages before sending it to topic, +default: json - By default, it uses json encoder to convert +pydantic basemodel to json string and then encodes the string to bytes +using 'utf-8' encoding. It also accepts custom encoder function.
  • prefix: Prefix stripped from the decorated function to define a topic +name if the topic argument is not passed, default: "to_". If the +decorated function name is not prefixed with the defined prefix +and topic argument is not passed, then this method will throw ValueError
  • brokers: Optional argument specifying multiple broker clusters for consuming +messages from different Kafka clusters in FastKafka.
  • description: Optional description of the producing function async docs. +If not provided, producing function doc attr will be used.
  • bootstrap_servers: a host[:port] string or list of +host[:port] strings that the producer should contact to +bootstrap initial cluster metadata. This does not have to be the +full node list. It just needs to have at least one broker that will +respond to a Metadata API Request. Default port is 9092. If no +servers are specified, will default to localhost:9092.
  • client_id: a name for this client. This string is passed in +each request to servers and can be used to identify specific +server-side log entries that correspond to this client. +Default: aiokafka-producer-# (appended with a unique number +per instance)
  • key_serializer: used to convert user-supplied keys to bytes +If not :data:None, called as f(key), should return +:class:bytes. +Default: :data:None.
  • value_serializer: used to convert user-supplied message +values to :class:bytes. If not :data:None, called as +f(value), should return :class:bytes. +Default: :data:None.
  • acks: one of 0, 1, all. The number of acknowledgments +the producer requires the leader to have received before considering a +request complete. This controls the durability of records that are +sent. The following settings are common:
  • 0: Producer will not wait for any acknowledgment from the server +at all. The message will immediately be added to the socket +buffer and considered sent. No guarantee can be made that the +server has received the record in this case, and the retries +configuration will not take effect (as the client won't +generally know of any failures). The offset given back for each +record will always be set to -1.
  • 1: The broker leader will write the record to its local log but +will respond without awaiting full acknowledgement from all +followers. In this case should the leader fail immediately +after acknowledging the record but before the followers have +replicated it then the record will be lost.
  • all: The broker leader will wait for the full set of in-sync +replicas to acknowledge the record. This guarantees that the +record will not be lost as long as at least one in-sync replica +remains alive. This is the strongest available guarantee.

If unset, defaults to acks=1. If enable_idempotence is +:data:True defaults to acks=all

  • compression_type: The compression type for all data generated by +the producer. Valid values are gzip, snappy, lz4, zstd +or :data:None. +Compression is of full batches of data, so the efficacy of batching +will also impact the compression ratio (more batching means better +compression). Default: :data:None.
  • max_batch_size: Maximum size of buffered data per partition. +After this amount :meth:send coroutine will block until batch is +drained. +Default: 16384
  • linger_ms: The producer groups together any records that arrive +in between request transmissions into a single batched request. +Normally this occurs only under load when records arrive faster +than they can be sent out. However in some circumstances the client +may want to reduce the number of requests even under moderate load. +This setting accomplishes this by adding a small amount of +artificial delay; that is, if first request is processed faster, +than linger_ms, producer will wait linger_ms - process_time. +Default: 0 (i.e. no delay).
  • partitioner: Callable used to determine which partition +each message is assigned to. Called (after key serialization): +partitioner(key_bytes, all_partitions, available_partitions). +The default partitioner implementation hashes each non-None key +using the same murmur2 algorithm as the Java client so that +messages with the same key are assigned to the same partition. +When a key is :data:None, the message is delivered to a random partition +(filtered to partitions with available leaders only, if possible).
  • max_request_size: The maximum size of a request. This is also +effectively a cap on the maximum record size. Note that the server +has its own cap on record size which may be different from this. +This setting will limit the number of record batches the producer +will send in a single request to avoid sending huge requests. +Default: 1048576.
  • metadata_max_age_ms: The period of time in milliseconds after +which we force a refresh of metadata even if we haven't seen any +partition leadership changes to proactively discover any new +brokers or partitions. Default: 300000
  • request_timeout_ms: Produce request timeout in milliseconds. +As it's sent as part of +:class:~kafka.protocol.produce.ProduceRequest (it's a blocking +call), maximum waiting time can be up to 2 * +request_timeout_ms. +Default: 40000.
  • retry_backoff_ms: Milliseconds to backoff when retrying on +errors. Default: 100.
  • api_version: specify which kafka API version to use. +If set to auto, will attempt to infer the broker version by +probing various APIs. Default: auto
  • security_protocol: Protocol used to communicate with brokers. +Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT, +SASL_SSL. Default: PLAINTEXT.
  • ssl_context: pre-configured :class:~ssl.SSLContext +for wrapping socket connections. Directly passed into asyncio's +:meth:~asyncio.loop.create_connection. For more +information see :ref:ssl_auth. +Default: :data:None
  • connections_max_idle_ms: Close idle connections after the number +of milliseconds specified by this config. Specifying :data:None will +disable idle checks. Default: 540000 (9 minutes).
  • enable_idempotence: When set to :data:True, the producer will +ensure that exactly one copy of each message is written in the +stream. If :data:False, producer retries due to broker failures, +etc., may write duplicates of the retried message in the stream. +Note that enabling idempotence requires acks to be set to all. If it is not +explicitly set by the user it will be chosen. If incompatible +values are set, a :exc:ValueError will be thrown. +New in version 0.5.0.
  • sasl_mechanism: Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values +are: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, +OAUTHBEARER. +Default: PLAIN
  • sasl_plain_username: username for SASL PLAIN authentication. +Default: :data:None
  • sasl_plain_password: password for SASL PLAIN authentication. +Default: :data:None
  • sasl_oauth_token_provider (:class:~aiokafka.abc.AbstractTokenProvider): +OAuthBearer token provider instance. (See +:mod:kafka.oauth.abstract). +Default: :data:None

Returns:

  • : A function returning the same function

Exceptions:

  • ValueError: when needed

run_in_background

def run_in_background(self: fastkafka.FastKafka) -> typing.Callable[[typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]], typing.Callable[..., typing.Coroutine[typing.Any, typing.Any, typing.Any]]]

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

  • A decorator function that takes a background task as an input and stores it to be run in the background.

set_kafka_broker

def set_kafka_broker(self, kafka_broker_name: str) -> None

Sets the Kafka broker to start FastKafka with

Parameters:

  • kafka_broker_name: The name of the Kafka broker to start FastKafka

Returns:

  • None

Exceptions:

  • ValueError: If the provided kafka_broker_name is not found in dictionary of kafka_brokers

using_local_kafka

def using_local_kafka(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, zookeeper_port: int = 2181, listener_port: int = 9092) -> Tester

Starts local Kafka broker used by the Tester instance

Parameters:

  • data_dir: Path to the directory where the zookeeper instance will save data
  • zookeeper_port: Port for clients (Kafka brokers) to connect
  • listener_port: Port on which the clients (producers and consumers) can connect

Returns:

  • An instance of tester with Kafka as broker

using_local_redpanda

def using_local_redpanda(self, topics: Iterable[str] = [], retries: int = 3, apply_nest_asyncio: bool = False, listener_port: int = 9092, tag: str = 'v23.1.2', seastar_core: int = 1, memory: str = '1G', mode: str = 'dev-container', default_log_level: str = 'debug') -> Tester

Starts local Redpanda broker used by the Tester instance

Parameters:

  • listener_port: Port on which the clients (producers and consumers) can connect
  • tag: Tag of Redpanda image to use to start container
  • seastar_core: Core(s) to be used by Seastar (the framework Redpanda uses under the hood)
  • memory: The amount of memory to make available to Redpanda
  • mode: Mode to use to load configuration properties in container
  • default_log_level: Log levels to use for Redpanda

Returns:

  • An instance of tester with Redpanda as broker
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/cli/fastkafka/index.html b/docs/0.7.1/cli/fastkafka/index.html new file mode 100644 index 0000000..88bd04b --- /dev/null +++ b/docs/0.7.1/cli/fastkafka/index.html @@ -0,0 +1,32 @@ + + + + + +fastkafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

fastkafka

Usage:

$ fastkafka [OPTIONS] COMMAND [ARGS]...

Options:

  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.

Commands:

  • docs: Commands for managing fastkafka app...
  • run: Runs Fast Kafka API application
  • testing: Commands for managing fastkafka testing

fastkafka docs

Commands for managing fastkafka app documentation

Usage:

$ fastkafka docs [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • generate: Generates documentation for a FastKafka...
  • install_deps: Installs dependencies for FastKafka...
  • serve: Generates and serves documentation for a...

fastkafka docs generate

Generates documentation for a FastKafka application

Usage:

$ fastkafka docs generate [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created; default is current directory
  • --help: Show this message and exit.

fastkafka docs install_deps

Installs dependencies for FastKafka documentation generation

Usage:

$ fastkafka docs install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.

fastkafka docs serve

Generates and serves documentation for a FastKafka application

Usage:

$ fastkafka docs serve [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created; default is current directory
  • --bind TEXT: Some info [default: 127.0.0.1]
  • --port INTEGER: Some info [default: 8000]
  • --help: Show this message and exit.

fastkafka run

Runs Fast Kafka API application

Usage:

$ fastkafka run [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --num-workers INTEGER: Number of FastKafka instances to run, defaults to number of CPU cores. [default: 64]
  • --kafka-broker TEXT: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [default: localhost]
  • --help: Show this message and exit.

fastkafka testing

Commands for managing fastkafka testing

Usage:

$ fastkafka testing [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • install_deps: Installs dependencies for FastKafka app...

fastkafka testing install_deps

Installs dependencies for FastKafka app testing

Usage:

$ fastkafka testing install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/cli/run_fastkafka_server_process/index.html b/docs/0.7.1/cli/run_fastkafka_server_process/index.html new file mode 100644 index 0000000..1cd7168 --- /dev/null +++ b/docs/0.7.1/cli/run_fastkafka_server_process/index.html @@ -0,0 +1,32 @@ + + + + + +run_fastkafka_server_process | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

run_fastkafka_server_process

Usage:

$ run_fastkafka_server_process [OPTIONS] APP

Arguments:

  • APP: Input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --kafka-broker TEXT: Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [required]
  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_00_FastKafka_Demo/index.html b/docs/0.7.1/guides/Guide_00_FastKafka_Demo/index.html new file mode 100644 index 0000000..e930fd5 --- /dev/null +++ b/docs/0.7.1/guides/Guide_00_FastKafka_Demo/index.html @@ -0,0 +1,122 @@ + + + + + +FastKafka tutorial | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

FastKafka tutorial

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install it with pip as usual:

pip install fastkafka
try:
import fastkafka
except:
! pip install fastkafka

Running in Colab

You can start this interactive tutorial in Google Colab by clicking the +button below:

Open In Colab

Writing server code

Here is an example python script using FastKafka that takes data from a +Kafka topic, makes a prediction using a predictive model, and outputs +the prediction to another Kafka topic.

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call downloads +the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +generating the documentation only and it is not being checked by the +actual server.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Testing the service

The service can be tested using the +Tester +instances which internally starts Kafka broker and zookeeper.

Before running tests, we have to install Java runtime and Apache Kafka +locally. To simplify the process, we provide the following convenience +command:

fastkafka testing install_deps
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)

# Start Tester app and create local Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created an Iris classification model and encapsulated it into our +fastkafka application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

Running the service

The service can be started using the builtin fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

To run the service, you will need a running Kafka broker on localhost as +specified in the kafka_brokers parameter above. We can start the Kafka +broker locally using the +ApacheKafkaBroker. +Notice that the same happens automatically in the +Tester +as shown above.

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.

'127.0.0.1:9092'

Then, we start the FastKafka service by running the following command in +the folder where the application.py file is located:

fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app

In the above command, we use --num-workers option to specify how many +workers to launch and we use --kafka-broker option to specify which +kafka broker configuration to use from earlier specified kafka_brokers

[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]
[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]
^C
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...

You need to interrupt the running of the cell above by selecting +Runtime->Interrupt execution on the toolbar above.

Finally, we can stop the local Kafka Broker:

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.

Documentation

The kafka app comes with built-in documentation generation using the +AsyncApi HTML generator.

When running in Colab, we need to update Node.js first:

We need to install all dependencies for the generator using the +following command line:

fastkafka docs install_deps
[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.

This will generate the asyncapi folder in the relative path where all +your documentation will be saved. You can check out the content of it +with:

ls -l asyncapi
total 8
drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs
drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find an asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
^C
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
bootstrap_servers="localhost:9092",
)

Kafka_servers

The following documentation snippets are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippets are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages as defined as subclasses of BaseModel are +documented as well:

![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_01_Intro/index.html b/docs/0.7.1/guides/Guide_01_Intro/index.html new file mode 100644 index 0000000..8c8107e --- /dev/null +++ b/docs/0.7.1/guides/Guide_01_Intro/index.html @@ -0,0 +1,51 @@ + + + + + +Intro | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Intro

This tutorial will show you how to use FastKafkaAPI, step by +step.

The goal of FastKafkaAPI is to simplify the use of Apache Kafka in +Python inspired by FastAPI look and feel.

In this Intro tutorial we’ll go through the basic requirements to run the +demos presented in future steps.

Installing FastKafkaAPI

The first step is to install FastKafkaAPI

$ pip install fastkafka

Preparing a Kafka broker

The next step is to prepare the Kafka environment, our consumers and +producers will need some channel of communication.

!!! info "Hey, your first info!"

If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. 

To go through the tutorial, we recommend that you use dockerized Kafka +brokers, if you have Docker and docker-compose installed the setup +should take you no time (if we exclude the container download times).

!!! warning "Listen! This is important."

To be able to setup this configuration you need to have Docker and docker-compose installed

See here for more info on <a href = \"https://docs.docker.com/\" target=\"_blank\">Docker</a> and <a href = \"https://docs.docker.com/compose/install/\" target=\"_blank\">docker compose</a>

To set up the recommended environment, first create a new folder where +you want to save your demo files (e.g. fastkafka_demo). Inside the new +folder create a new YAML file named kafka_demo.yml and copy the +following configuration into it:

version: "3"
services:
zookeeper:
image: wurstmeister/zookeeper
hostname: zookeeper
container_name: zookeeper
networks:
- fastkafka-network
ports:
- "2181:2181"
- "22:22"
- "2888:2888"
- "3888:3888"
kafka:
image: wurstmeister/kafka
container_name: kafka
ports:
- "9093:9093"
environment:
HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d' ' -f 2"
KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093
KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093
KAFKA_INTER_BROKER_LISTENER_NAME: INTER
KAFKA_CREATE_TOPICS: "hello:1:1"
volumes:
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
- zookeeper
healthcheck:
test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]
interval: 5s
timeout: 10s
retries: 5
networks:
- fastkafka-network
networks:
fastkafka-network:
name: "fastkafka-network"

This configuration will start a single instance of Zookeeper, single +instance of Kafka broker and create a ‘hello’ topic (quite enough for a +start). To start the configuration, run:

$ docker-compose -f kafka_demo.yml up -d --wait

This will start the necessary containers and wait till they report that +they are Healthy. After the command finishes, you are good to go to try +out the FastKafkaAPI capabilities! 🎊

Running the code

After installing FastKafkaAPI and initialising the Kafka broker you can +proceed to the ‘First Steps’ part of the tutorial. There, you will write +your first Kafka client and producer apps, run them, and interact with +them.

You are highly encouraged to follow along the tutorials not just by +reading through them but by implementing the code examples in your own +environment. This will not only help you remember the use cases better +but also, hopefully, demonstrate to you the ease of use of this library.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_02_First_Steps/index.html b/docs/0.7.1/guides/Guide_02_First_Steps/index.html new file mode 100644 index 0000000..9c2a0c9 --- /dev/null +++ b/docs/0.7.1/guides/Guide_02_First_Steps/index.html @@ -0,0 +1,49 @@ + + + + + +First Steps | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

First Steps

Creating a simple Kafka consumer app

For our first demo we will create the simplest possible Kafka consumer +and run it using ‘fastkafka run’ command.

The consumer will:

  1. Connect to the Kafka Broker we setup in the Intro guide

  2. Listen to the hello topic

  3. Write any message received from the hello topic to stdout

To create the consumer, first, create a file named

hello_kafka_consumer.py and copy the following code to it:

from os import environ

from fastkafka import FastKafka
from pydantic import BaseModel, Field

kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
"localhost": {
"description": "local development kafka",
"url": kafka_server_url,
"port": kafka_server_port
}
}

class HelloKafkaMsg(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_app = FastKafka(
kafka_brokers=kafka_brokers
)

@kafka_app.consumes()
async def on_hello(msg: HelloKafkaMsg):
print(f"Got data, msg={msg.msg}", flush=True)

!!! info "Kafka configuration"

This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

!!! warning "Remember to flush"

Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.

To run this consumer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app

After running the command, you should see something similar to the output +below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})
[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.

Now you can interact with your consumer, by sending the messages to the +subscribed ‘hello’ topic, don’t worry, we will cover this in the next +step of this guide.

Sending first message to your consumer

After we have created and run our first consumer, we should send a +message to it, to make sure it is working properly.

If you are using the Kafka setup as described in the Intro guide, you +can follow the steps listed here to send a message to the hello topic.

First, connect to your running kafka broker by running:

docker run -it kafka /bin/bash

Then, when connected to the container, run:

kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello

This will open an interactive connection to the hello topic, now you can +write your messages to the topic and they will be consumed by our +consumer.

In the shell, type:

{"msg":"hello"}

and press enter. This will send a hello message to the topic which will +be read by our running consumer and output to stdout.

Check the output of your consumer (terminal where you ran the ‘fastkafka +run’ command) and confirm that your consumer has read the Kafka message. +You should see something like this:

Got data, msg=hello

Creating a hello Kafka producer

Consuming messages is only a part of this library’s functionality, the +other big part is producing the messages. So, let’s create our first +kafka producer which will send its greetings to our consumer +periodically.

The producer will:

  1. Connect to the Kafka Broker we setup in the Intro guide
  2. Connect to the hello topic
  3. Periodically send a message to the hello world topic

To create the producer, first, create a file named

hello_kafka_producer.py and copy the following code to it:

from os import environ

import asyncio
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
"localhost": {
"description": "local development kafka",
"url": kafka_server_url,
"port": kafka_server_port
}
}

class HelloKafkaMsg(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_app = FastKafka(
kafka_brokers=kafka_brokers
)

logger = get_logger(__name__)

@kafka_app.produces()
async def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:
logger.info(f"Producing: {msg}")
return msg

@kafka_app.run_in_background()
async def hello_every_second():
while(True):
await to_hello(HelloKafkaMsg(msg="hello"))
await asyncio.sleep(1)

!!! info "Kafka configuration"

This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

To run this producer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app

After running the command, you should see something similar to the output +below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.

Now, while the producer is running, it will send a HelloKafkaMsg every +second to the hello kafka topic. If your consumer is still running, you +should see the messages appear in its log.

Recap

In this guide we have:

  1. Created a simple Kafka consumer using FastKafka
  2. Sent a message to our consumer through Kafka
  3. Created a simple Kafka producer using FastKafka
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_03_Authentication/index.html b/docs/0.7.1/guides/Guide_03_Authentication/index.html new file mode 100644 index 0000000..effa207 --- /dev/null +++ b/docs/0.7.1/guides/Guide_03_Authentication/index.html @@ -0,0 +1,37 @@ + + + + + +Authentication | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Authentication

TLS Authentication

sasl_mechanism (str) – Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN, +GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN

sasl_plain_username (str) – username for SASL PLAIN authentication. +Default: None

sasl_plain_password (str) – password for SASL PLAIN authentication. +Default: None

sasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token +provider instance. (See kafka.oauth.abstract). Default: None

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow/index.html b/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow/index.html new file mode 100644 index 0000000..2e5227d --- /dev/null +++ b/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow/index.html @@ -0,0 +1,42 @@ + + + + + +Deploy FastKafka docs to GitHub Pages | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Deploy FastKafka docs to GitHub Pages

Getting started

Add your workflow file .github/workflows/fastkafka_docs_deploy.yml and +push it to your remote default branch.

Here is an example workflow:

name: Deploy FastKafka Generated Documentation to GitHub Pages

on:
push:
branches: [ "main", "master" ]
workflow_dispatch:

jobs:
deploy:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

Options

Set app location

Input in the form of path:app, where path is the path to a Python +file and app is an object of type +FastKafka:

- name: Deploy
uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

In the above example, +FastKafka +app is named as kafka_app and it is available in the application +submodule of the test_fastkafka module.

Example Repository

A +FastKafka-based +library that uses the above-mentioned workflow actions to publish +FastKafka docs to GitHub Pages can be found +here.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_05_Lifespan_Handler/index.html b/docs/0.7.1/guides/Guide_05_Lifespan_Handler/index.html new file mode 100644 index 0000000..0532df1 --- /dev/null +++ b/docs/0.7.1/guides/Guide_05_Lifespan_Handler/index.html @@ -0,0 +1,75 @@ + + + + + +Lifespan Events | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Lifespan Events

Did you know that you can define some special code that runs before and +after your Kafka application? This code will be executed just once, but +it covers the whole lifespan of your app! 🚀

Let’s break it down:

You can define logic (code) that should be executed before the +application starts up. This is like a warm-up for your app, getting it +ready to consume and produce messages.

Similarly, you can define logic (code) that should be executed when the +application is shutting down. This is like a cool-down for your app, +making sure everything is properly closed and cleaned up.

By executing code before consuming and after producing, you cover the +entire lifecycle of your application 🎉

This is super handy for setting up shared resources that are needed +across consumers and producers, like a database connection pool or a +machine learning model. And the best part? You can clean up these +resources when the app is shutting down!

So let’s give it a try and see how it can make your Kafka app even more +awesome! 💪

Lifespan example - Iris prediction model

Let’s dive into an example to see how you can leverage the lifecycle +handler to solve a common use case. Imagine that you have some machine +learning models that need to consume incoming messages and produce +response/prediction messages. These models are shared among consumers +and producers, which means you don’t want to load them for every +message.

Here’s where the lifecycle handler comes to the rescue! By loading the +model before the messages are consumed and produced, but only right +before the application starts receiving messages, you can ensure that +the model is ready to use without compromising the performance of your +tests. In the upcoming sections, we’ll walk you through how to +initialize an Iris species prediction model and use it in your developed +application.

Lifespan

You can define this startup and shutdown logic using the lifespan +parameter of the FastKafka app, and an async context manager.

Let’s start with an example and then see it in detail.

We create an async function lifespan() with yield like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from fastkafka import FastKafka

ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

The first thing to notice, is that we are defining an async function +with yield. This is very similar to Dependencies with yield.

The first part of the function, before the yield, will be executed +before the application starts. And the part after the yield will +be executed after the application has finished.

This lifespan will create an iris_prediction model on application +startup and cleanup the references after the app is shutdown.

The lifespan will be passed a KafkaApp reference on startup of your +application, which you can use to reference your application on startup.

For demonstration sake, we also added prints so that when running the +app we can see that our lifespan was called.

Async context manager

Context managers can be used in with blocks, our lifespan, for example +could be used like this:

ml_models = {}
async with lifespan(None):
print(ml_models)

When you create a context manager or an async context manager, what it +does is that, before entering the with block, it will execute the code +before the yield, and after exiting the with block, it will execute +the code after the yield.

If you want to learn more about context managers and contextlib +decorators, please visit Python official +docs

App demo

FastKafka app

Let’s now create our application using the created lifespan handler.

Notice how we passed our lifespan handler to the app when constructing +it through the lifespan argument.

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Data modeling

Let’s model the Iris data for our app:

from pydantic import BaseModel, Field, NonNegativeFloat

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Consumers and producers

Lets create a consumer and producer for our app that will generate +predictions from input iris data.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Final app

The final app looks like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")
ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

to_predictions(species_class)


@kafka_app.produces(topic="predictions")
def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Running the app

Now we can run the app with your custom lifespan handler. Copy the code above into lifespan_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app

When you run the app, you should see a similar output to the one below:

[262292]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[262292]: Loading the model!
[262292]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Entering...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Starting send_stream
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.start(): Finished.
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[262292]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[262292]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[262292]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[262292]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 262292...
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[262292]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Entering...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Exiting send_stream
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: _aiokafka_producer_manager(): Finished.
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Stoping producer...
[262292]: [INFO] fastkafka._components.aiokafka_producer_manager: AIOKafkaProducerManager.stop(): Finished
[262292]: Exiting, clearing model dict!
[INFO] fastkafka._server: terminate_asyncio_process(): Process 262292 terminated.

Recap

In this guide we have defined a lifespan handler and passed to our +FastKafka app.

Some important points are:

  1. Lifespan handler is implemented as +AsyncContextManager
  2. Code before yield in lifespan will be executed before +application startup
  3. Code after yield in lifespan will be executed after +application shutdown
  4. You can pass your lifespan handler to FastKafka app on +initialisation by passing a lifespan argument
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka/index.html b/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka/index.html new file mode 100644 index 0000000..57b6743 --- /dev/null +++ b/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka/index.html @@ -0,0 +1,80 @@ + + + + + +Benchmarking FastKafka app | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Benchmarking FastKafka app

Prerequisites

To benchmark a +FastKafka +project, you will need the following:

  1. A library built with +FastKafka.
  2. A running Kafka instance to benchmark the FastKafka application +against.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

FastKafka +has a decorator for benchmarking which is appropriately called as +benchmark. Let’s edit our application.py file and add the +benchmark decorator to the consumes method.

# content of the "application.py" file with benchmark

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
@kafka_app.benchmark(interval=1, sliding_window_size=5)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Here we are conducting a benchmark of a function that consumes data from +the input_data topic with an interval of 1 second and a sliding window +size of 5.

This benchmark method uses the interval parameter to calculate the +results over a specific time period, and the sliding_window_size +parameter to determine the maximum number of results to use in +calculating the average throughput and standard deviation.

This benchmark is important to ensure that the function is performing +optimally and to identify any areas for improvement.

Starting Kafka

If you already have a Kafka running somewhere, then you can skip this +step.

Please keep in mind that your benchmarking results may be affected by +bottlenecks such as network, CPU cores in the Kafka machine, or even the +Kafka configuration itself.

Installing Java and Kafka

We need a working Kafka instance to benchmark our +FastKafka +app, and to run Kafka we need Java. Thankfully, +FastKafka +comes with a CLI to install both Java and Kafka on our machine.

So, let’s install Java and Kafka by executing the following command.

fastkafka testing install_deps

The above command will extract Kafka scripts at the location "$HOME/.local/kafka_2.13-3.3.2" on your machine.

Creating configuration for Zookeeper and Kafka

Now we need to start Zookeeper and Kafka separately, and to start +them we need zookeeper.properties and kafka.properties files.

Let’s create a folder inside the folder where Kafka scripts were +extracted and change directory into it.

mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir

Let’s create a file called zookeeper.properties and write the +following content to the file:

dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper
clientPort=2181
maxClientCnxns=0

Similarly, let’s create a file called kafka.properties and write the +following content to the file:

broker.id=0
listeners=PLAINTEXT://:9092

num.network.threads=3
num.io.threads=8
socket.send.buffer.bytes=102400
socket.receive.buffer.bytes=102400
socket.request.max.bytes=104857600

num.partitions=1
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1

log.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs
log.flush.interval.messages=10000
log.flush.interval.ms=1000
log.retention.hours=168
log.retention.bytes=1073741824
log.segment.bytes=1073741824
log.retention.check.interval.ms=300000

zookeeper.connect=localhost:2181
zookeeper.connection.timeout.ms=18000

Starting Zookeeper and Kafka

We need two different terminals to run Zookeeper in one and Kafka in +another. Let’s open a new terminal and run the following commands to +start Zookeeper:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./zookeeper-server-start.sh ../data_dir/zookeeper.properties

Once Zookeeper is up and running, open a new terminal and execute the following commands to start Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-server-start.sh ../data_dir/kafka.properties

Now we have both Zookeeper and Kafka up and running.

Creating topics in Kafka

In a new terminal, please execute the following command to create +necessary topics in Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092
./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092

Populating topics with dummy data

To benchmark our +FastKafka +app, we need some data in Kafka topics.

In the same terminal, let’s create some dummy data:

yes '{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}' | head -n 1000000 > /tmp/test_data

This command will create a file called test_data in the tmp folder +with one million rows of text. This will act as dummy data to populate +the input_data topic.

Let’s populate the created topic input_data with the dummy data which +we created above:

./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data

Now our topic input_data has one million records/messages in it. If +you want more messages in topic, you can simply execute the above +command again and again.

Benchmarking FastKafka

Once Zookeeper and Kafka are ready, benchmarking +FastKafka +app is as simple as running the fastkafka run command:

fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app

This command will start the +FastKafka +app and begin consuming messages from Kafka, which we spun up earlier. +Additionally, the same command will output all of the benchmark +throughputs based on the interval and sliding_window_size values.

The output for the fastkafka run command is:

[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh
ost:9092', 'max_poll_records': 100}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition
=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)

Based on the output, when using 1 worker, our +FastKafka +app achieved a throughput of 93k messages per second and an +average throughput of 93k messages per second.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html b/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html new file mode 100644 index 0000000..b40f9cb --- /dev/null +++ b/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html @@ -0,0 +1,150 @@ + + + + + +Encoding and Decoding Kafka Messages with FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Encoding and Decoding Kafka Messages with FastKafka

Prerequisites

  1. A basic knowledge of +FastKafka +is needed to proceed with this guide. If you are not familiar with +FastKafka, +please go through the tutorial first.
  2. FastKafka +with its dependencies installed is needed. Please install +FastKafka +using the command - pip install fastkafka

Ways to Encode and Decode Messages with FastKafka

In Python, by default, we send Kafka messages as bytes. Even if our message is a string, we convert it to bytes and then send it to a Kafka topic. Similarly, while consuming messages, we consume them as bytes and then convert them to strings.

In FastKafka, we specify message schema using Pydantic models as +mentioned in tutorial:

# Define Pydantic models for Kafka messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Then, we send and receive messages as instances of Pydantic models which +we defined. So, FastKafka needs a way to encode/decode to these Pydantic +model messages to bytes in order to send/receive messages to/from Kafka +topics.

The @consumes and @produces methods of FastKafka accept a parameter +called decoder/encoder to decode/encode Kafka messages. FastKafka +provides three ways to encode and decode messages:

  1. json - This is the default encoder/decoder option in FastKafka. +While producing, this option converts our instance of Pydantic model +messages to a JSON string and then converts it to bytes before +sending it to the topic. While consuming, it converts bytes to a +JSON string and then constructs an instance of Pydantic model from +the JSON string.
  2. avro - This option uses Avro encoding/decoding to convert instances +of Pydantic model messages to bytes while producing, and while +consuming, it constructs an instance of Pydantic model from bytes.
  3. custom encoder/decoder - If you are not happy with the json or avro +encoder/decoder options, you can write your own encoder/decoder +functions and use them to encode/decode Pydantic messages.

1. Json encoder and decoder

The default option in FastKafka is json encoder/decoder. This option, +while producing, converts our instance of pydantic model messages to +json string and then converts to bytes before sending it to the topics. +While consuming it converts bytes to json string and then constructs +instance of pydantic model from json string.

We can use the application from tutorial as +is, and it will use the json encoder/decoder by default. But, for +clarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder +parameter:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder="json")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="json")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

In the above code, the @kafka_app.consumes decorator sets up a +consumer for the “input_data" topic, using the ‘json’ decoder to convert +the message payload to an instance of IrisInputData. The +@kafka_app.produces decorator sets up a producer for the “predictions" +topic, using the ‘json’ encoder to convert the instance of +IrisPrediction to message payload.

2. Avro encoder and decoder

What is Avro?

Avro is a row-oriented remote procedure call and data serialization +framework developed within Apache’s Hadoop project. It uses JSON for +defining data types and protocols, and serializes data in a compact +binary format. To learn more about the Apache Avro, please check out the +docs.

Installing FastKafka with Avro dependencies

FastKafka +with dependencies for Apache Avro installed is needed to use avro +encoder/decoder. Please install +FastKafka +with Avro support using the command - pip install fastkafka[avro]

Defining Avro Schema Using Pydantic Models

By default, you can use Pydantic model to define your message schemas. +FastKafka internally takes care of encoding and decoding avro messages, +based on the Pydantic models.

So, similar to the tutorial, the message schema will +remain as it is.

# Define Pydantic models for Avro messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

No need to change anything to support avro. You can use existing +Pydantic models as is.

Reusing existing avro schema

If you are using some other library to send and receive avro encoded +messages, it is highly likely that you already have an Avro schema +defined.

Building pydantic models from avro schema dictionary

Let’s modify the above example and let’s assume we have schemas already +for IrisInputData and IrisPrediction which will look like below:

iris_input_data_schema = {
"type": "record",
"namespace": "IrisInputData",
"name": "IrisInputData",
"fields": [
{"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
{"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
{"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
{"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
],
}
iris_prediction_schema = {
"type": "record",
"namespace": "IrisPrediction",
"name": "IrisPrediction",
"fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}

We can easily construct pydantic models from avro schema using +avsc_to_pydantic +function which is included as part of +FastKafka +itself.

from fastkafka.encoder import avsc_to_pydantic

IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.__fields__)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.__fields__)

The above code will convert avro schema to pydantic models and will +print pydantic models’ fields. The output of the above is:

{'sepal_length': ModelField(name='sepal_length', type=float, required=True),
'sepal_width': ModelField(name='sepal_width', type=float, required=True),
'petal_length': ModelField(name='petal_length', type=float, required=True),
'petal_width': ModelField(name='petal_width', type=float, required=True)}

{'species': ModelField(name='species', type=str, required=True)}

This is exactly same as manually defining the pydantic models ourselves. +You don’t have to worry about not making any mistakes while converting +avro schema to pydantic models manually. You can easily and +automatically accomplish it by using +avsc_to_pydantic +function as demonstrated above.

Building pydantic models from .avsc file

Not all cases will have avro schema conveniently defined as a python +dictionary. You may have it stored as the proprietary .avsc files in +filesystem. Let’s see how to convert those .avsc files to pydantic +models.

Let’s assume our avro files are stored in files called +iris_input_data_schema.avsc and iris_prediction_schema.avsc. In that +case, following code converts the schema to pydantic models:

import json
from fastkafka.encoder import avsc_to_pydantic


with open("iris_input_data_schema.avsc", "rb") as f:
iris_input_data_schema = json.load(f)

with open("iris_prediction_schema.avsc", "rb") as f:
iris_prediction_schema = json.load(f)


IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.__fields__)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.__fields__)

Consume/Produce avro messages with FastKafka

FastKafka +provides @consumes and @produces methods to consume/produces +messages to/from a Kafka topic. This is explained in +tutorial.

The @consumes and @produces methods accepts a parameter called +decoder/encoder to decode/encode avro messages.

@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

In the above example, in @consumes and @produces methods, we +explicitly instruct FastKafka to decode and encode messages using +the avro decoder/encoder instead of the default json +decoder/encoder.

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use avro to decode and +encode messages:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


iris_input_data_schema = {
"type": "record",
"namespace": "IrisInputData",
"name": "IrisInputData",
"fields": [
{"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
{"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
{"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
{"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
],
}
iris_prediction_schema = {
"type": "record",
"namespace": "IrisPrediction",
"name": "IrisPrediction",
"fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}
# Or load schema from avsc files

from fastkafka.encoder import avsc_to_pydantic

IrisInputData = avsc_to_pydantic(iris_input_data_schema)
IrisPrediction = avsc_to_pydantic(iris_prediction_schema)


from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

The above code is a sample implementation of using FastKafka to consume +and produce Avro-encoded messages from/to a Kafka topic. The code +defines two Avro schemas for the input data and the prediction result. +It then uses the +avsc_to_pydantic +function from the FastKafka library to convert the Avro schema into +Pydantic models, which will be used to decode and encode Avro messages.

The +FastKafka +class is then instantiated with the broker details, and two functions +decorated with @kafka_app.consumes and @kafka_app.produces are +defined to consume messages from the “input_data" topic and produce +messages to the “predictions" topic, respectively. The functions use +the decoder=“avro" and encoder=“avro" parameters to decode and encode +the Avro messages.

In summary, the above code demonstrates a straightforward way to use +Avro-encoded messages with FastKafka to build a message processing +pipeline.

3. Custom encoder and decoder

If you are not happy with the json or avro encoder/decoder options, you +can write your own encoder/decoder functions and use them to +encode/decode Pydantic messages.

Writing a custom encoder and decoder

In this section, let’s see how to write a custom encoder and decoder +which obfuscates kafka message with simple +ROT13 cipher.

import codecs
import json
from typing import Any

from pydantic.main import ModelMetaclass


def custom_encoder(msg: BaseModel) -> bytes:
msg_str = msg.json()
obfuscated = codecs.encode(msg_str, 'rot13')
raw_bytes = obfuscated.encode("utf-8")
return raw_bytes

def custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:
obfuscated = raw_msg.decode("utf-8")
msg_str = codecs.decode(obfuscated, 'rot13')
msg_dict = json.loads(msg_str)
return cls(**msg_dict)

The above code defines two custom functions for encoding and decoding +messages in a Kafka application using the FastKafka library.

The encoding function, custom_encoder(), takes a message msg which +is an instance of a Pydantic model, converts it to a JSON string using +the json() method, obfuscates the resulting string using the ROT13 +algorithm from the codecs module, and finally encodes the obfuscated +string as raw bytes using the UTF-8 encoding.

The decoding function, custom_decoder(), takes a raw message raw_msg +in bytes format, a Pydantic class to construct instance with cls +parameter. It first decodes the raw message from UTF-8 encoding, then +uses the ROT13 algorithm to de-obfuscate the string. Finally, it loads +the resulting JSON string using the json.loads() method and returns a +new instance of the specified cls class initialized with the decoded +dictionary.

These functions can be used with FastKafka’s encoder and decoder +parameters to customize the serialization and deserialization of +messages in Kafka topics.

Let’s test the above code

i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

encoded = custom_encoder(i)
display(encoded)

decoded = custom_decoder(encoded, IrisInputData)
display(decoded)

This will result in following output

b'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}'

IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use our custom decoder and +encoder functions:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")


import codecs
import json
from typing import Any

from pydantic.main import ModelMetaclass


def custom_encoder(msg: BaseModel) -> bytes:
msg_str = msg.json()
obfuscated = codecs.encode(msg_str, 'rot13')
raw_bytes = obfuscated.encode("utf-8")
return raw_bytes

def custom_decoder(raw_msg: bytes, cls: ModelMetaclass) -> Any:
obfuscated = raw_msg.decode("utf-8")
msg_str = codecs.decode(obfuscated, 'rot13')
msg_dict = json.loads(msg_str)
return cls(**msg_dict)


from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder=custom_decoder)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder=custom_encoder)
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

This code defines a custom encoder and decoder functions for encoding +and decoding messages sent through a Kafka messaging system.

The custom encoder function takes a message represented as a +BaseModel and encodes it as bytes by first converting it to a JSON +string and then obfuscating it using the ROT13 encoding. The obfuscated +message is then converted to bytes using UTF-8 encoding and returned.

The custom decoder function takes in the bytes representing an +obfuscated message, decodes it using UTF-8 encoding, then decodes the +ROT13 obfuscation, and finally loads it as a dictionary using the json +module. This dictionary is then converted to a BaseModel instance +using the cls parameter.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_11_Consumes_Basics/index.html b/docs/0.7.1/guides/Guide_11_Consumes_Basics/index.html new file mode 100644 index 0000000..0b99d82 --- /dev/null +++ b/docs/0.7.1/guides/Guide_11_Consumes_Basics/index.html @@ -0,0 +1,88 @@ + + + + + +@consumes basics | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

@consumes basics

You can use @consumes decorator to consume messages from Kafka topics.

In this guide we will create a simple FastKafka app that will consume +HelloWorld messages from hello_world topic.

Import FastKafka

To use the @consumes decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

In this demo we will log the messages to the output so that we can +inspect and verify that our app is consuming properly. For that we need +to import the logger.

from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

Define the structure of the messages

Next, you need to define the structure of the messages you want to +consume from the topic using pydantic. For +the guide we’ll stick to something basic, but you are free to define any +complex message structure you wish in your project, just make sure it +can be JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a consumer function and decorate it with @consumes

Let’s create a consumer function that will consume HelloWorld messages +from hello_world topic and log them.

@app.consumes()
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

The function decorated with the @consumes decorator will be called +when a message is produced to Kafka.

The message will then be injected into the typed msg argument of the +function and its type will be used to parse the message.

In this example case, when the message is sent into a hello_world +topic, it will be parsed into a HelloWorld class and on_hello_world +function will be called with the parsed class as msg argument value.

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)


kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)
@app.consumes()
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Run the app

Now we can run the app. Copy the code above in consumer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

After running the command, you should see this output in your terminal:

[513863]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[513863]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[513863]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[513863]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 513863...
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[513863]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 513863 terminated.

Send the message to kafka topic

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo {\"msg\": \"Hello world\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>
print(consumer_task.value[1].decode("UTF-8"))

You should see the “Got msg: msg='Hello world'" being logged by your +consumer.

Choosing a topic

You probably noticed that you didn’t define which topic you are +receiving the message from, this is because the @consumes decorator +determines the topic by default from your function name. The decorator +will take your function name and strip the default “on_" prefix from it +and use the rest as the topic name. In this example case, the topic is +hello_world.

You can choose your custom prefix by defining the prefix parameter in +consumes decorator, like this:

Also, you can define the topic name completely by defining the topic +in parameter in consumes decorator, like this:

Message data

The message received from kafka is translated from binary JSON +representation into the class defined by typing of msg parameter in the +function decorated by the @consumes decorator.

In this example case, the message will be parsed into a HelloWorld +class.

Message metadata

If you need any of Kafka message metadata such as timestamp, partition +or headers you can access the metadata by adding a EventMetadata typed +argument to your consumes function and the metadata from the incoming +message will be automatically injected when calling the consumes +function.

Let’s demonstrate that.

Create a consumer function with metadata

The only difference from the original basic consume function is that we +are now passing the meta: EventMetadata argument to the function. The +@consumes decorator will register that and, when a message is +consumed, it will also pass the metadata to your function. Now you can +use the metadata in your consume function. Lets log it to see what it +contains.

First, we need to import the EventMetadata

Now we can add the meta argument to our consuming function.

Your final app should look like this:

Now lets run the app and send a message to the broker to see the logged +message metadata.

You should see a similar log as the one below and the metadata being +logged in your app.

As you can see in the log, from the metadata you now have the +information about the partition, offset, timestamp, key and headers. +🎉

Dealing with high latency consuming functions

If your functions have high latency due to, for example, lengthy +database calls you will notice a big decrease in performance. This is +due to the issue of how the consumes decorator executes your consume +functions when consuming events. By default, the consume function will +run the consuming functions for one topic sequentially, this is the most +straightforward approach and results with the least amount of overhead.

But, to handle those high latency tasks and run them in parallel, +FastKafka has a +DynamicTaskExecutor +prepared for your consumers. This executor comes with additional +overhead, so use it only when you need to handle high latency functions.

Lets demonstrate how to use it.

decorate_consumes_executor = """@app.consumes(executor="DynamicTaskExecutor")
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")
"""
md(f"```python\n{decorate_consumes_executor}\n```")
@app.consumes(executor="DynamicTaskExecutor")
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo {\"msg\": \"Hello world\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the “Got msg: msg='Hello world'" being logged by your +consumer.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_12_Batch_Consuming/index.html b/docs/0.7.1/guides/Guide_12_Batch_Consuming/index.html new file mode 100644 index 0000000..c9700e6 --- /dev/null +++ b/docs/0.7.1/guides/Guide_12_Batch_Consuming/index.html @@ -0,0 +1,47 @@ + + + + + +Batch consuming | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Batch consuming

If you want to consume data in batches @consumes decorator makes that +possible for you. By typing a consumed msg object as a list of +messages the consumer will call your consuming function with a batch of +messages consumed from a single partition. Let’s demonstrate that now.

Consume function with batching

To consume messages in batches, you need to wrap your message type into a +list and the @consumes decorator will take care of the rest for you. +Your consumes function will be called with batches grouped by partition +now.

@app.consumes(auto_offset_reset="earliest")
async def on_hello_world(msg: List[HelloWorld]):
logger.info(f"Got msg batch: {msg}")

App example

We will modify the app example from @consumes +basics guide to consume +HelloWorld messages batch. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


import asyncio
from typing import List
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.consumes(auto_offset_reset="earliest")
async def on_hello_world(msg: List[HelloWorld]):
logger.info(f"Got msg batch: {msg}")

Send the messages to kafka topic

Lets send a couple of HelloWorld messages to the hello_world topic +and check if our consumer kafka application has logged the received +messages batch. In your terminal, run the following command at least two +times to create multiple messages in your kafka queue:

echo {\"msg\": \"Hello world\"} | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

Now we can run the app. Copy the code of the example app in +consumer_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

You should see your Kafka messages being logged in batches by your +consumer.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_21_Produces_Basics/index.html b/docs/0.7.1/guides/Guide_21_Produces_Basics/index.html new file mode 100644 index 0000000..394c3fc --- /dev/null +++ b/docs/0.7.1/guides/Guide_21_Produces_Basics/index.html @@ -0,0 +1,62 @@ + + + + + +@produces basics | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

@produces basics

You can use @produces decorator to produce messages to Kafka topics.

In this guide we will create a simple FastKafka app that will produce +hello world messages to hello_world topic.

Import FastKafka

To use the @produces decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

Define the structure of the messages

Next, you need to define the structure of the messages you want to send +to the topic using pydantic. For the guide +we’ll stick to something basic, but you are free to define any complex +message structure you wish in your project, just make sure it can be +JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a producer function and decorate it with @produces

Let’s create a producer function that will produce HelloWorld messages +to hello_world topic:


@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Now you can call your defined function as any normal python function in +your code. The side effect of calling the function will be that the +value you are returning will also be sent to a kafka topic.

By default, the topic is determined from your function name, the “to_" +prefix is stripped and what is left over is used as a topic name. In this +case, that is hello_world.

Instruct the app to start sending HelloWorld messages

Let’s use @run_in_background decorator to instruct our app to send +HelloWorld messages to hello_world topic every second.


import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)


kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

script_file = "producer_example.py"
cmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"
md(
f"Now we can run the app. Copy the code above in producer_example.py and run it by running\n```shell\n{cmd}\n```"
)

Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.

Check if the message was sent to the Kafka topic

Lets check the topic and see if there is a “Hello world!" message in the +hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the {“msg": “Hello world!"} messages in your topic.

Choosing a topic

You probably noticed that you didn’t define which topic you are sending +the message to, this is because the @produces decorator determines the +topic by default from your function name. The decorator will take your +function name and strip the default “to_" prefix from it and use the +rest as the topic name. In this example case, the topic is +hello_world.

!!! warn "New topics"

Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.

You can choose your custom prefix by defining the prefix parameter in +produces decorator, like this:


@app.produces(prefix="send_to_")
async def send_to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Also, you can define the topic name completely by defining the topic +in parameter in produces decorator, like this:


@app.produces(topic="my_special_topic")
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Message data

The return value from your function will be translated JSON string and +then to bytes and sent to defined Kafka topic. The typing of the return +value is used for generating the documentation for your Kafka app.

In this example case, the return value is HelloWorld class which will be +translated into JSON formatted string and then to bytes. The translated +data will then be sent to Kafka. In the form of: +b'{"msg": "Hello world!"}'

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_22_Partition_Keys/index.html b/docs/0.7.1/guides/Guide_22_Partition_Keys/index.html new file mode 100644 index 0000000..302bd25 --- /dev/null +++ b/docs/0.7.1/guides/Guide_22_Partition_Keys/index.html @@ -0,0 +1,55 @@ + + + + + +Defining a partition key | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Defining a partition key

Partition keys are used in Apache Kafka to determine which partition a +message should be written to. This ensures that related messages are +kept together in the same partition, which can be useful for ensuring +order or for grouping related messages together for efficient +processing. Additionally, partitioning data across multiple partitions +allows Kafka to distribute load across multiple brokers and scale +horizontally, while replicating data across multiple brokers provides +fault tolerance.

You can define your partition keys when using the @produces decorator, +this guide will demonstrate to you this feature.

Return a key from the producing function

To define a key for the message that you want to produce to Kafka topic, +you need to wrap the response into +KafkaEvent +class and set the key value. Check the example below:


from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

In the example, we want to return the HelloWorld message class with +the key defined as my_key. So, we wrap the message and key into a +KafkaEvent class and return it as such.

While generating the documentation, the +KafkaEvent +class will be unwrapped and the HelloWorld class will be documented in +the definition of message type, same way if you didn’t use the key.

!!! info "Which key to choose?"

Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.

App example

We will modify the app example from @produces basics guide to return +the HelloWorld with our key. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

Now we can run the app. Copy the code above into producer_with_key_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app

After running the command, you should see this output in your terminal:

[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.

Check if the message was sent to the Kafka topic with the desired key

Let's check the topic and see if there is a "Hello world!" message in the hello_world topic with the defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the my_key {"msg": "Hello world!"} messages appearing in your topic; the my_key part of the message is the key that we defined in our producing function.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_23_Batch_Producing/index.html b/docs/0.7.1/guides/Guide_23_Batch_Producing/index.html new file mode 100644 index 0000000..4a81305 --- /dev/null +++ b/docs/0.7.1/guides/Guide_23_Batch_Producing/index.html @@ -0,0 +1,55 @@ + + + + + +Batch producing | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Batch producing

If you want to send your data in batches, the @produces decorator makes that possible for you. By returning a list of messages that you want to send in a batch, the producer will collect the messages and send them in a batch to a Kafka broker.

This guide will demonstrate how to use this feature.

Return a batch from the producing function

To define a batch that you want to produce to Kafka topic, you need to +return the List of the messages that you want to be batched from your +producing function.


from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

In the example, we want to return the HelloWorld message class batch +that is created from a list of msgs we passed into our producing +function.

Let's also prepare a background task that will send a batch of "hello world" messages when the app starts.


@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

App example

We will modify the app example from @producer +basics guide to return the +HelloWorld batch. The final app will look like this (make sure you +replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

Run the app

Now we can run the app. Copy the code above into producer_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task
[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.

Check if the batch was sent to the Kafka topic

Let's check the topic and see if there are "Hello world" messages in the hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages in your topic.

Batch key

To define a key for your batch, you can wrap the returned value in a KafkaEvent class. To learn more about defining a partition key and the KafkaEvent class, please have a look at the Defining a partition key guide.

Let’s demonstrate that.

To define a key, we just need to modify our producing function, like +this:


from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Now our app looks like this:


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Check if the batch was sent to the Kafka topic

Let's check the topic and see if there are "Hello world" messages in the hello_world topic, containing a defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages with the defined key in your topic.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html b/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html new file mode 100644 index 0000000..5876279 --- /dev/null +++ b/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html @@ -0,0 +1,155 @@ + + + + + +Using multiple Kafka clusters | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Using multiple Kafka clusters

Ready to take your FastKafka app to the next level? This guide shows you +how to connect to multiple Kafka clusters effortlessly. Consolidate +topics and produce messages across clusters like a pro. Unleash the full +potential of your Kafka-powered app with FastKafka. Let’s dive in and +elevate your application’s capabilities!

Test message

To showcase the functionalities of FastKafka and illustrate the concepts +discussed, we can use a simple test message called TestMsg. Here’s the +definition of the TestMsg class:

class TestMsg(BaseModel):
msg: str = Field(...)

Defining multiple broker configurations

When building a FastKafka application, you may need to consume messages +from multiple Kafka clusters, each with its own set of broker +configurations. FastKafka provides the flexibility to define different +broker clusters using the brokers argument in the consumes decorator. +Let’s explore an example code snippet

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(
development=dict(url="dev.server_1", port=9092),
production=dict(url="prod.server_1", port=9092),
)
kafka_brokers_2 = dict(
development=dict(url="dev.server_2", port=9092),
production=dict(url="prod.server_1", port=9092),
)

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_1(msg: TestMsg):
print(f"Received on s1: {msg=}")
await to_predictions_1(msg)


@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def on_preprocessed_signals_2(msg: TestMsg):
print(f"Received on s2: {msg=}")
await to_predictions_2(msg)

@app.produces(topic="predictions")
async def to_predictions_1(msg: TestMsg) -> TestMsg:
return msg

@app.produces(topic="predictions", brokers=kafka_brokers_2)
async def to_predictions_2(msg: TestMsg) -> TestMsg:
return msg

In this example, the application has two consumes endpoints, both of +which will consume events from preprocessed_signals topic. +on_preprocessed_signals_1 will consume events from kafka_brokers_1 +configuration and on_preprocessed_signals_2 will consume events from +kafka_brokers_2 configuration. When producing, to_predictions_1 will +produce to predictions topic on kafka_brokers_1 cluster and +to_predictions_2 will produce to predictions topic on +kafka_brokers_2 cluster.

How it works

The kafka_brokers_1 configuration represents the primary cluster, +while kafka_brokers_2 serves as an alternative cluster specified in +the decorator.

Using the FastKafka class, the app object is initialized with the +primary broker configuration (kafka_brokers_1). By default, the +@app.consumes decorator without the brokers argument consumes messages +from the preprocessed_signals topic on kafka_brokers_1.

To consume messages from a different cluster, the @app.consumes +decorator includes the brokers argument. This allows explicit +specification of the broker cluster in the on_preprocessed_signals_2 +function, enabling consumption from the same topic but using the +kafka_brokers_2 configuration.

The brokers argument can also be used in the @app.produces decorator to +define multiple broker clusters for message production.

It’s important to ensure that all broker configurations have the same +required settings as the primary cluster to ensure consistent behavior.

Testing the application

To test our FastKafka ‘mirroring’ application, we can use our testing +framework. Lets take a look how it’s done:

from fastkafka.testing import Tester

async with Tester(app) as tester:
# Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from
await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal_s1"))
# Assert on_preprocessed_signals_1 consumed sent message
await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(
TestMsg(msg="signal_s1"), timeout=5
)
# Assert app has produced a prediction
await tester.mirrors[app.to_predictions_1].assert_called_with(
TestMsg(msg="signal_s1"), timeout=5
)

# Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from
await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal_s2"))
# Assert on_preprocessed_signals_2 consumed sent message
await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(
TestMsg(msg="signal_s2"), timeout=5
)
# Assert app has produced a prediction
await tester.mirrors[app.to_predictions_2].assert_called_with(
TestMsg(msg="signal_s2"), timeout=5
)
23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
23-05-30 10:33:08.720 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-05-30 10:33:08.721 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-05-30 10:33:08.721 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'
23-05-30 10:33:08.722 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:08.722 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'
23-05-30 10:33:08.723 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:08.741 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'
23-05-30 10:33:08.741 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:08.742 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'
23-05-30 10:33:08.743 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:08.744 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:08.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}
23-05-30 10:33:08.746 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:08.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:08.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:08.749 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:08.754 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}
23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:08.755 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:08.755 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:08.756 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:08.756 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:08.757 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}
23-05-30 10:33:08.758 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:08.758 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:08.759 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:33:08.759 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:08.760 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:08.761 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}
23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:08.762 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:08.762 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:08.763 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:33:08.763 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
Received on s1: msg=TestMsg(msg='signal_s1')
Received on s2: msg=TestMsg(msg='signal_s2')
23-05-30 10:33:13.745 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:13.746 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:13.747 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:13.747 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:13.748 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:13.748 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:13.749 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:13.750 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:13.751 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:13.751 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:13.753 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
23-05-30 10:33:13.754 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

The usage of the tester.mirrors dictionary allows specifying the +desired topic/broker combination for sending the test messages, +especially when working with multiple Kafka clusters. This ensures that +the data is sent to the appropriate topic/broker based on the consuming +function, and consumed from appropriate topic/broker based on the +producing function.

Running the application

You can run your application using fastkafka run CLI command in the +same way that you would run a single cluster app.

To start your app, copy the code above in multi_cluster_example.py and +run it by running:

Now we can run the app. Copy the code above in multi_cluster_example.py, +adjust your server configurations, and run it by running

fastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app

In your app logs, you should see your app starting up and your two +consumer functions connecting to different kafka clusters.

[90735]: 23-05-30 10:33:29.699 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[90735]: 23-05-30 10:33:29.700 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:57647'}
[90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})
[90735]: 23-05-30 10:33:29.714 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}
[90735]: 23-05-30 10:33:29.714 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})
[90735]: 23-05-30 10:33:29.718 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}
[90735]: 23-05-30 10:33:29.718 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[90735]: 23-05-30 10:33:29.722 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}.
[90735]: 23-05-30 10:33:29.723 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}.
Starting process cleanup, this may take a few seconds...
23-05-30 10:33:33.548 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 90735...
[90735]: 23-05-30 10:33:34.666 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[90735]: 23-05-30 10:33:34.667 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:34.777 [INFO] fastkafka._server: terminate_asyncio_process(): Process 90735 terminated.

Application documentation

At the moment the documentation for a multi-cluster app is not yet implemented, but it is under development and you can expect it soon!

Examples on how to use multiple broker configurations

Example #1

In this section, we’ll explore how you can effectively forward topics +between different Kafka clusters, enabling seamless data synchronization +for your applications.

Imagine having two Kafka clusters, namely kafka_brokers_1 and +kafka_brokers_2, each hosting its own set of topics and messages. Now, +if you want to forward a specific topic (in this case: +preprocessed_signals) from kafka_brokers_1 to kafka_brokers_2, +FastKafka provides an elegant solution.

Let’s examine the code snippet that configures our application for topic +forwarding:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_original(msg: TestMsg):
await to_preprocessed_signals_forward(msg)


@app.produces(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:
return data

Here’s how it works: our FastKafka application is configured to consume +messages from kafka_brokers_1 and process them in the +on_preprocessed_signals_original function. We want to forward these +messages to kafka_brokers_2. To achieve this, we define the +to_preprocessed_signals_forward function as a producer, seamlessly +producing the processed messages to the preprocessed_signals topic +within the kafka_brokers_2 cluster.

Testing

To test our FastKafka forwarding application, we can use our testing +framework. Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg="signal"))
await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)
23-05-30 10:33:40.969 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
23-05-30 10:33:40.970 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-05-30 10:33:40.971 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-05-30 10:33:40.972 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-05-30 10:33:40.972 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:40.982 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:33:40.982 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:40.983 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:40.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:33:40.984 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:40.985 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:40.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:40.986 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:40.987 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:40.988 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-05-30 10:33:40.989 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:40.989 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:40.990 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:40.991 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:44.983 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:44.984 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:44.985 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:44.986 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:44.987 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:44.987 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
23-05-30 10:33:44.988 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

With the help of the Tester object, we can simulate and verify the +behavior of our FastKafka application. Here’s how it works:

  1. We create an instance of the Tester by passing in our app +object, which represents our FastKafka application.

  2. Using the tester.mirrors dictionary, we can send a message to a +specific Kafka broker and topic combination. In this case, we use +tester.mirrors[app.on_preprocessed_signals_original] to send a +TestMsg message with the content “signal" to the appropriate Kafka +broker and topic.

  3. After sending the message, we can perform assertions on the mirrored +function using +tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5). +This assertion ensures that the mirrored function has been called +within a specified timeout period (in this case, 5 seconds).

Example #2

In this section, we’ll explore how you can effortlessly consume data +from multiple sources, process it, and aggregate the results into a +single topic on a specific cluster.

Imagine you have two Kafka clusters: kafka_brokers_1 and +kafka_brokers_2, each hosting its own set of topics and messages. +Now, what if you want to consume data from both clusters, perform some +processing, and produce the results to a single topic on +kafka_brokers_1? FastKafka has got you covered!

Let’s take a look at the code snippet that configures our application +for aggregating multiple clusters:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_1(msg: TestMsg):
print(f"Default: {msg=}")
await to_predictions(msg)


@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def on_preprocessed_signals_2(msg: TestMsg):
print(f"Specified: {msg=}")
await to_predictions(msg)


@app.produces(topic="predictions")
async def to_predictions(prediction: TestMsg) -> TestMsg:
print(f"Sending prediction: {prediction}")
return [prediction]

Here’s the idea: our FastKafka application is set to consume messages +from the topic “preprocessed_signals" on kafka_brokers_1 cluster, as +well as from the same topic on kafka_brokers_2 cluster. We have two +consuming functions, on_preprocessed_signals_1 and +on_preprocessed_signals_2, that handle the messages from their +respective clusters. These functions perform any required processing, in +this case, just calling the to_predictions function.

The exciting part is that the to_predictions function acts as a +producer, sending the processed results to the “predictions" topic on +kafka_brokers_1 cluster. By doing so, we effectively aggregate the +data from multiple sources into a single topic on a specific cluster.

This approach enables you to consume data from multiple Kafka clusters, +process it, and produce the aggregated results to a designated topic. +Whether you’re generating predictions, performing aggregations, or any +other form of data processing, FastKafka empowers you to harness the +full potential of multiple clusters.

Testing

Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal"))
await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal"))
await tester.on_predictions.assert_called(timeout=5)
23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
23-05-30 10:33:50.827 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-05-30 10:33:50.828 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-05-30 10:33:50.829 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:33:50.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:50.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:33:50.875 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:50.876 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-05-30 10:33:50.876 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:33:50.877 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:50.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:50.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:50.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:50.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:50.880 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:50.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-05-30 10:33:50.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:50.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:50.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:50.883 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:33:50.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:33:50.884 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:33:50.885 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:33:50.885 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:33:50.886 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:33:50.886 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
Default: msg=TestMsg(msg='signal')
Sending prediction: msg='signal'
Specified: msg=TestMsg(msg='signal')
Sending prediction: msg='signal'
23-05-30 10:33:54.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:54.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:54.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:54.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:54.881 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:54.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:54.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:54.882 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:33:54.882 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:33:54.883 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
23-05-30 10:33:54.884 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Here’s how the code above works:

  1. Within an async with block, create an instance of the Tester by +passing in your app object, representing your FastKafka application.

  2. Using the tester.mirrors dictionary, you can send messages to +specific Kafka broker and topic combinations. In this case, we use +tester.mirrors[app.on_preprocessed_signals_1] and +tester.mirrors[app.on_preprocessed_signals_2] to send TestMsg +messages with the content “signal" to the corresponding Kafka broker +and topic combinations.

  3. After sending the messages, you can perform assertions on the +on_predictions function using +tester.on_predictions.assert_called(timeout=5). This assertion +ensures that the on_predictions function has been called within a +specified timeout period (in this case, 5 seconds).

Example #3

In some scenarios, you may need to produce messages to multiple Kafka +clusters simultaneously. FastKafka simplifies this process by allowing +you to configure your application to produce messages to multiple +clusters effortlessly. Let’s explore how you can achieve this:

Consider the following code snippet that demonstrates producing messages +to multiple clusters:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals(msg: TestMsg):
print(f"{msg=}")
await to_predictions_1(TestMsg(msg="prediction"))
await to_predictions_2(TestMsg(msg="prediction"))


@app.produces(topic="predictions")
async def to_predictions_1(prediction: TestMsg) -> TestMsg:
print(f"Sending prediction to s1: {prediction}")
return [prediction]


@app.produces(topic="predictions", brokers=kafka_brokers_2)
async def to_predictions_2(prediction: TestMsg) -> TestMsg:
print(f"Sending prediction to s2: {prediction}")
return [prediction]

Here’s what you need to know about producing to multiple clusters:

  1. We define two Kafka broker configurations: kafka_brokers_1 and +kafka_brokers_2, representing different clusters with their +respective connection details.

  2. We create an instance of the FastKafka application, specifying +kafka_brokers_1 as the primary cluster for producing messages.

  3. The on_preprocessed_signals function serves as a consumer, +handling incoming messages from the “preprocessed_signals" topic. +Within this function, we invoke two producer functions: +to_predictions_1 and to_predictions_2.

  4. The to_predictions_1 function sends predictions to the +“predictions" topic on kafka_brokers_1 cluster.

  5. Additionally, the to_predictions_2 function sends the same +predictions to the “predictions" topic on kafka_brokers_2 cluster. +This allows for producing the same data to multiple clusters +simultaneously.

By utilizing this approach, you can seamlessly produce messages to +multiple Kafka clusters, enabling you to distribute data across +different environments or leverage the strengths of various clusters.

Feel free to customize the producer functions as per your requirements, +performing any necessary data transformations or enrichment before +sending the predictions.

With FastKafka, producing to multiple clusters becomes a breeze, +empowering you to harness the capabilities of multiple environments +effortlessly.

Testing

Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.to_preprocessed_signals(TestMsg(msg="signal"))
await tester.mirrors[to_predictions_1].assert_called(timeout=5)
await tester.mirrors[to_predictions_2].assert_called(timeout=5)
23-05-30 10:34:00.033 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
23-05-30 10:34:00.034 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-05-30 10:34:00.035 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-05-30 10:34:00.036 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:34:00.037 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:34:00.038 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-05-30 10:34:00.038 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:34:00.052 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-05-30 10:34:00.053 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-05-30 10:34:00.054 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:34:00.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:34:00.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:34:00.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:34:00.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:34:00.057 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:34:00.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:34:00.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-05-30 10:34:00.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:34:00.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:34:00.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:34:00.062 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:34:00.062 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-05-30 10:34:00.063 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-05-30 10:34:00.064 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-05-30 10:34:00.064 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-05-30 10:34:00.065 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-05-30 10:34:00.065 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
msg=TestMsg(msg='signal')
Sending prediction to s1: msg='prediction'
Sending prediction to s2: msg='prediction'
23-05-30 10:34:04.055 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:34:04.055 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:34:04.056 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:34:04.056 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:34:04.057 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:34:04.058 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-05-30 10:34:04.058 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-05-30 10:34:04.059 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-30 10:34:04.059 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
23-05-30 10:34:04.060 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Here’s how you can perform the necessary tests:

  1. Within an async with block, create an instance of the Tester by +passing in your app object, representing your FastKafka application.

  2. Using the tester.to_preprocessed_signals method, you can send a +TestMsg message with the content “signal".

  3. After sending the message, you can perform assertions on the +to_predictions_1 and to_predictions_2 functions using +tester.mirrors[to_predictions_1].assert_called(timeout=5) and +tester.mirrors[to_predictions_2].assert_called(timeout=5). These +assertions ensure that the respective producer functions have +produced data to their respective topic/broker combinations.

By employing this testing approach, you can verify that the producing +functions correctly send messages to their respective clusters. The +testing framework provided by FastKafka enables you to ensure the +accuracy and reliability of your application’s producing logic.

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html b/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html new file mode 100644 index 0000000..0999a10 --- /dev/null +++ b/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html @@ -0,0 +1,73 @@ + + + + + +Deploying FastKafka using Docker | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Deploying FastKafka using Docker

Building a Docker Image

To build a Docker image for a FastKafka project, we need the following +items:

  1. A library that is built using FastKafka.
  2. A file in which the requirements are specified. This could be a +requirements.txt file, a setup.py file, or even a wheel file.
  3. A Dockerfile to build an image that will include the two files +mentioned above.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Creating requirements.txt file

The above code only requires fastkafka. So, we will add only +fastkafka to the requirements.txt file, but you can add additional +requirements to it as well.

fastkafka>=0.3.0

Here we are using requirements.txt to store the project’s +dependencies. However, other methods like setup.py, pipenv, and +wheel files can also be used. setup.py is commonly used for +packaging and distributing Python modules, while pipenv is a tool used +for managing virtual environments and package dependencies. wheel +files are built distributions of Python packages that can be installed +with pip.

Creating Dockerfile

# (1)
FROM python:3.9-slim-bullseye
# (2)
WORKDIR /project
# (3)
COPY application.py requirements.txt /project/
# (4)
RUN pip install --no-cache-dir --upgrade -r /project/requirements.txt
# (5)
CMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]
  1. Start from the official Python base image.

  2. Set the current working directory to /project.

    This is where we’ll put the requirements.txt file and the +application.py file.

  3. Copy the application.py file and requirements.txt file inside +the /project directory.

  4. Install the package dependencies in the requirements file.

    The --no-cache-dir option tells pip to not save the downloaded +packages locally, as that is only if pip was going to be run again +to install the same packages, but that’s not the case when working +with containers.

    The --upgrade option tells pip to upgrade the packages if they +are already installed.

  5. Set the command to run the fastkafka run command.

    CMD takes a list of strings, each of these strings is what you +would type in the command line separated by spaces.

    This command will be run from the current working directory, the +same /project directory you set above with WORKDIR /project.

    We supply additional parameters --num-workers and --kafka-broker +for the run command. Finally, we specify the location of our +fastkafka application location as a command argument.

    To learn more about fastkafka run command please check the CLI +docs.

Build the Docker Image

Now that all the files are in place, let’s build the container image.

  1. Go to the project directory (where your Dockerfile is, containing +your application.py file).

  2. Run the following command to build the image:

    docker build -t fastkafka_project_image .

    This command will create a docker image with the name +fastkafka_project_image and the latest tag.

That’s it! You have now built a docker image for your FastKafka project.

Start the Docker Container

Run a container based on the built image:

docker run -d --name fastkafka_project_container fastkafka_project_image

Additional Security

Trivy is an open-source tool that scans Docker images for +vulnerabilities. It can be integrated into your CI/CD pipeline to ensure +that your images are secure and free from known vulnerabilities. Here’s +how you can use trivy to scan your fastkafka_project_image:

  1. Install trivy on your local machine by following the instructions +provided in the official trivy +documentation.

  2. Run the following command to scan your fastkafka_project_image:

    trivy image fastkafka_project_image

    This command will scan your fastkafka_project_image for any +vulnerabilities and provide you with a report of its findings.

  3. Fix any vulnerabilities identified by trivy. You can do this by +updating the vulnerable package to a more secure version or by using +a different package altogether.

  4. Rebuild your fastkafka_project_image and repeat steps 2 and 3 +until trivy reports no vulnerabilities.

By using trivy to scan your Docker images, you can ensure that your +containers are secure and free from known vulnerabilities.

Example repo

A +FastKafka +based library which uses above mentioned Dockerfile to build a docker +image can be found +here

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html b/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html new file mode 100644 index 0000000..ec024cb --- /dev/null +++ b/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html @@ -0,0 +1,143 @@ + + + + + +Using Redpanda to test FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Using Redpanda to test FastKafka

What is FastKafka?

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

What is Redpanda?

Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools +work out of the box with Redpanda.

From redpanda.com:

Redpanda is a Kafka®-compatible streaming data platform that is proven +to be 10x faster and 6x lower in total costs. It is also JVM-free, +ZooKeeper®-free, Jepsen-tested and source available.

Some of the advantages of Redpanda over Kafka are

  1. A single binary with built-in everything, no ZooKeeper® or JVM +needed.
  2. Costs up to 6X less than Kafka.
  3. Up to 10x lower average latencies and up to 6x faster Kafka +transactions without compromising correctness.

To learn more about Redpanda, please visit their +website or checkout this blog +post +comparing Redpanda and Kafka’s performance benchmarks.

Example repo

A sample fastkafka-based library that uses Redpanda for testing, based +on this guide, can be found +here.

The process

Here are the steps we’ll be walking through to build our example:

  1. Set up the prerequisites.
  2. Clone the example repo.
  3. Explain how to write an application using FastKafka.
  4. Explain how to write a test case to test FastKafka with Redpanda.
  5. Run the test case and produce/consume messages.

1. Prerequisites

Before starting, make sure you have the following prerequisites set up:

  1. Python 3.x: A Python 3.x installation is required to run +FastKafka. You can download the latest version of Python from the +official website. You’ll also +need to have pip installed and updated, which is Python’s package +installer.
  2. Docker Desktop: Docker is used to run Redpanda, which is +required for testing FastKafka. You can download and install Docker +Desktop from the official +website.
  3. Git: You’ll need to have Git installed to clone the example +repo. You can download Git from the official +website.

2. Cloning and setting up the example repo

To get started with the example code, clone the GitHub +repository by +running the following command in your terminal:

git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git
cd sample_fastkafka_with_redpanda

This will create a new directory called sample_fastkafka_with_redpanda +and download all the necessary files.

Create a virtual environment

Before writing any code, let’s create a new virtual +environment +for our project.

A virtual environment is an isolated environment for a Python project, +which allows you to manage project-specific dependencies and avoid +conflicts between different projects.

To create a new virtual environment, run the following commands in your +terminal:

python3 -m venv venv

This will create a new directory called venv in your project +directory, which will contain the virtual environment.

To activate the virtual environment, run the following command:

source venv/bin/activate

This will change your shell’s prompt to indicate that you are now +working inside the virtual environment.

Finally, run the following command to upgrade pip, the Python package +installer:

pip install --upgrade pip

Install Python dependencies

Next, let’s install the required Python dependencies. In this guide, +we’ll be using +FastKafka +to write our application code and pytest and pytest-asyncio to test +it.

You can install the dependencies from the requirements.txt file +provided in the cloned repository by running:

pip install -r requirements.txt

This will install all the required packages and their dependencies.

3. Writing server code

The application.py file in the cloned repository demonstrates how to +use FastKafka to consume messages from a Kafka topic, make predictions +using a predictive model, and publish the predictions to another Kafka +topic. Here is an explanation of the code:

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call +downloads the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which contains two entries: "localhost" and "production", specifying local development and production Kafka brokers. Each entry specifies the URL, port, and other details of a Kafka broker. This dictionary is used both to generate documentation and to later run the server against one of the given Kafka brokers.

Next, an instance of the +FastKafka +class is initialized with the minimum required arguments:

  • kafka_brokers: a dictionary used for generating documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

4. Writing the test code

The service can be tested using the +Tester +instance which can be configured to start a Redpanda +broker for testing +purposes. The test.py file in the cloned repository contains the +following code for testing.

import pytest
from application import IrisInputData, IrisPrediction, kafka_app

from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)


@pytest.mark.asyncio
async def test():
# Start Tester app and create local Redpanda broker for testing
async with Tester(kafka_app).using_local_redpanda(
tag="v23.1.2", listener_port=9092
) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)

The +Tester +module utilizes +LocalRedpandaBroker +to start and stop a Redpanda broker for testing purposes using Docker.

5. Running the tests

We can run the tests in the test.py file by executing the +following command:

pytest test.py

This will start a Redpanda broker using Docker and execute the tests. The +output of the command is:

(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest
============================== test session starts ===============================
platform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0
rootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py
plugins: asyncio-0.21.0, anyio-3.6.2
asyncio: mode=strict
collected 1 item

test.py . [100%]

=============================== 1 passed in 7.28s ================================
(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$

Running the tests with the Redpanda broker ensures that your code is +working correctly with a real Kafka-like message broker, making your +tests more reliable.

Recap

We have created an Iris classification model and encapsulated it into our +FastKafka +application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our +Tester +class with Redpanda broker which mirrors the developed app topics +for testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

+ + + + \ No newline at end of file diff --git a/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html b/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html new file mode 100644 index 0000000..9ef583b --- /dev/null +++ b/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html @@ -0,0 +1,78 @@ + + + + + +Using FastAPI to Run FastKafka Application | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

Using FastAPI to Run FastKafka Application

When deploying a FastKafka application, the default approach is to +utilize the fastkafka run CLI +command. This command allows you to launch your FastKafka application as +a standalone service. However, if you already have a FastAPI application +in place and wish to run FastKafka application alongside it, you have an +alternative option.

FastKafka provides a method called +FastKafka.fastapi_lifespan +that leverages FastAPI’s +lifespan +feature. This method allows you to run your FastKafka application +together with your existing FastAPI app, seamlessly integrating their +functionalities. By using the +FastKafka.fastapi_lifespan +method, you can start the FastKafka application within the same process +as the FastAPI app.

The +FastKafka.fastapi_lifespan +method ensures that both FastAPI and FastKafka are initialized and start +working simultaneously. This approach enables the execution of +Kafka-related tasks, such as producing and consuming messages, while +also handling HTTP requests through FastAPI’s routes.

By combining FastAPI and FastKafka in this manner, you can build a +comprehensive application that harnesses the power of both frameworks. +Whether you require real-time messaging capabilities or traditional HTTP +endpoints, this approach allows you to leverage the strengths of FastAPI +and FastKafka within a single deployment setup.

Prerequisites

  1. A basic knowledge of +FastKafka +is needed to proceed with this guide. If you are not familiar with +FastKafka, +please go through the tutorial first.
  2. FastKafka +and FastAPI libraries needs to be installed.

This guide will provide a step-by-step explanation, taking you through +each stage individually, before combining all the components in the +final section for a comprehensive understanding of the process.

1. Basic FastKafka app

In this step, we will begin by creating a simple FastKafka application.

from pydantic import BaseModel, Field, NonNegativeFloat
from typing import *

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Greetings",
kafka_brokers=kafka_brokers,
)


class TestMsg(BaseModel):
msg: str = Field(...)


@kafka_app.consumes()
async def on_names(msg: TestMsg):
await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))


@kafka_app.produces()
async def to_greetings(greeting: TestMsg) -> TestMsg:
return greeting

In the above example, we consume messages from a topic called names, +we prepend “Hello" to the message, and send it back to another topic +called greetings.

We now have a simple +FastKafka +app to produce and consume from two topics.

2. Using fastapi_lifespan method

In this step of the guide, we will explore the integration of a +FastKafka application with a FastAPI application using the +FastKafka.fastapi_lifespan +method. The +FastKafka.fastapi_lifespan +method is a feature provided by FastKafka, which allows you to +seamlessly integrate a FastKafka application with a FastAPI application +by leveraging FastAPI’s lifespan feature.

from fastapi import FastAPI

fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name="localhost"))


@fastapi_app.get("/hello")
async def hello():
return {"msg": "hello there"}

In the above example, a new instance of the FastAPI app is created, +and when the app is started using uvicorn, it also runs the +FastKafka +application concurrently.

Putting it all together

Let’s put the above code together and write it in a file called +fast_apps.py.

# content of the "fast_apps.py" file

from pydantic import BaseModel, Field, NonNegativeFloat
from typing import *

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Greetings",
kafka_brokers=kafka_brokers,
)


class TestMsg(BaseModel):
msg: str = Field(...)


@kafka_app.consumes()
async def on_names(msg: TestMsg):
await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))


@kafka_app.produces()
async def to_greetings(greeting: TestMsg) -> TestMsg:
return greeting


from fastapi import FastAPI

fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan("localhost"))

@fastapi_app.get("/hello")
async def hello():
return {"msg": "hello there"}

Finally, you can run the FastAPI application using a web server of your +choice, such as Uvicorn or Hypercorn by running the below command:

uvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080
+ + + + \ No newline at end of file diff --git a/docs/0.7.1/index.html b/docs/0.7.1/index.html new file mode 100644 index 0000000..5d56355 --- /dev/null +++ b/docs/0.7.1/index.html @@ -0,0 +1,121 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.7.1

FastKafka

Effortless Kafka integration for your web services

PyPI PyPI -
+Downloads PyPI - Python
+Version

GitHub Workflow
+Status +CodeQL +Dependency
+Review

GitHub


FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.


⭐⭐⭐ Stay in touch ⭐⭐⭐

Please show your support and stay in touch by:

Your support helps us to stay in touch with you and encourages us to +continue developing and improving the library. Thank you for your +support!


🐝🐝🐝 We were busy lately 🐝🐝🐝

Activity

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install base version of fastkafka with pip as usual:

pip install fastkafka

To install fastkafka with testing features please use:

pip install fastkafka[test]

To install fastkafka with asyncapi docs please use:

pip install fastkafka[docs]

To install fastkafka with all the features please use:

pip install fastkafka[test,docs]

Tutorial

You can start an interactive tutorial in Google Colab by clicking the +button below:

Open in Colab

Writing server code

To demonstrate FastKafka simplicity of using @produces and @consumes +decorators, we will focus on a simple app.

The app will consume jsons containing positive floats from one topic, log +them and then produce incremented values to another topic.

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines one Data message class. This class will model the +consumed and produced data in our app demo, it contains one +NonNegativeFloat field data that will be logged and “processed" +before being produced to another topic.

These message class will be used to parse and validate incoming data in +Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class Data(BaseModel):
data: NonNegativeFloat = Field(
..., example=0.5, description="Float data example"
)

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +both generating the documentation and later to run the actual server +against one of the given kafka broker.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation

We will also import and create a logger so that we can log the incoming +data in our consuming function.

from logging import getLogger
from fastkafka import FastKafka

logger = getLogger("Demo Kafka app")

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the Data message class. Specifying the type of the +single argument is instructing the Pydantic to use Data.parse_raw() +on the consumed message before passing it to the user defined function +on_input_data.

  • The @produces decorator is applied to the to_output_data function, +which specifies that this function should produce a message to the +“output_data" Kafka topic whenever it is called. The to_output_data +function takes a single float argument data. It increments the +data and returns it wrapped in a Data object. The framework will call +the Data.json().encode("utf-8") function on the returned value and +produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: Data):
logger.info(f"Got data: {msg.data}")
await to_output_data(msg.data)


@kafka_app.produces(topic="output_data")
async def to_output_data(data: float) -> Data:
processed_data = Data(data=data+1.0)
return processed_data

Testing the service

The service can be tested using the +Tester +instances which internally starts InMemory implementation of Kafka +broker.

The Tester will redirect your consumes and produces decorated functions +to the InMemory Kafka broker so that you can quickly test your app +without the need for a running Kafka broker and all its dependencies.

from fastkafka.testing import Tester

msg = Data(
data=0.1,
)

# Start Tester app and create InMemory Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send Data message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with incremented data in output_data topic
await tester.awaited_mocks.on_output_data.assert_awaited_with(
Data(data=1.1), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] Demo Kafka app: Got data: 0.1
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created a simple fastkafka application. The app will consume the +Data from the input_data topic, log it and produce the incremented +data to output_data topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent Data message to input_data topic

  4. Asserted and checked that the developed service has reacted to Data +message

Running the service

The service can be started using the built-in fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class Data(BaseModel):
data: NonNegativeFloat = Field(
..., example=0.5, description="Float data example"
)

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: Data):
logger.info(f"Got data: {msg.data}")
await to_output_data(msg.data)


@kafka_app.produces(topic="output_data")
async def to_output_data(data: float) -> Data:
processed_data = Data(data=data+1.0)
return processed_data

To run the service, use the FastKafka CLI command and pass the module +(in this case, the file where the app implementation is located) and the +app symbol to the command.

fastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app

After running the command, you should see the following output in your +command line:

[1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1506]: 23-05-31 11:36:46.042 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata
[1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)
[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]
[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)
[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]
[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...
23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.
23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

AsyncApi requires Node.js to be installed and we provide the following +convenience command line for it:

fastkafka docs install_deps
23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.
23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'
23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /content/asyncapi/docs.

This will generate the asyncapi folder in relative path where all your +documentation will be saved. You can check out the content of it with:

ls -l asyncapi
total 8
drwxr-xr-x 4 root root 4096 May 31 11:38 docs
drwxr-xr-x 2 root root 4096 May 31 11:38 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find an asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'
23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /content/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
127.0.0.1 - - [31/May/2023 11:39:14] "GET / HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/global.min.css HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /js/asyncapi-ui.min.js HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/asyncapi.min.css HTTP/1.1" 200 -
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

Kafka_servers

The following documentation snippet are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippet are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages defined as subclasses of BaseModel are +documented as well:

Kafka_messages

License

FastKafka is licensed under the Apache License 2.0

A permissive license whose main conditions require preservation of +copyright and license notices. Contributors provide an express grant of +patent rights. Licensed works, modifications, and larger works may be +distributed under different terms and without source code.

The full text of the license can be found +here.

+ + + + \ No newline at end of file diff --git a/docs/CHANGELOG/index.html b/docs/CHANGELOG/index.html new file mode 100644 index 0000000..93c8356 --- /dev/null +++ b/docs/CHANGELOG/index.html @@ -0,0 +1,33 @@ + + + + + +Release notes | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Release notes

0.7.0

New Features

  • Optional description argument to consumes and produces decorator implemented (#338), thanks to @Sternakt

    • Consumes and produces decorators now have optional description argument that is used instead of function docstring in async doc generation when specified
  • FastKafka Windows OS support enabled (#326), thanks to @kumaranvpl

    • FastKafka can now run on Windows
  • FastKafka and FastAPI integration implemented (#304), thanks to @kumaranvpl

    • FastKafka can now be run alongside FastAPI
  • Batch consuming option to consumers implemented (#298), thanks to @Sternakt

    • Consumers can consume events in batches by specifying msg type of consuming function as List[YourMsgType]
  • Removed support for synchronous produce functions (#295), thanks to @kumaranvpl

  • Added default broker values and update docs (#292), thanks to @Sternakt

Bugs Squashed

  • Fix index.ipynb to be runnable in colab (#342)

  • Use cli option root_path docs generate and serve CLI commands (#341), thanks to @kumaranvpl

  • Fix incorrect asyncapi docs path on fastkafka docs serve command (#335), thanks to @Sternakt

    • Serve docs now takes app root_path argument into consideration when specified in app
  • Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps (#315)

  • Fix logs printing timestamps (#308)

  • Fix topics with dots causing failure of tester instantiation (#306), thanks to @Sternakt

    • Specified topics can now have "." in their names

0.6.0

New Features

  • Timestamps added to CLI commands (#283), thanks to @davorrunje

  • Added option to process messages concurrently (#278), thanks to @Sternakt

    • A new executor option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.
  • Add consumes and produces functions to app (#274), thanks to @Sternakt

  • Export encoders, decoders from fastkafka.encoder (#246), thanks to @kumaranvpl
  • Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. (#239)
  • UI Improvement: Post screenshots with links to the actual messages in testimonials section (#228)

Bugs Squashed

  • Batch testing fix (#280), thanks to @Sternakt

  • Tester breaks when using Batching or KafkaEvent producers (#279)

  • Consumer loop callbacks are not executing in parallel (#276)

0.5.0

New Features

  • Significant speedup of Kafka producer (#236), thanks to @Sternakt

Bugs Squashed

0.4.0

New Features

0.3.1

  • README.md file updated

0.3.0

New Features

  • Guide for FastKafka produces using partition key (#172), thanks to @Sternakt

    • Closes #161
  • Add support for Redpanda for testing and deployment (#181), thanks to @kumaranvpl

  • Remove bootstrap_servers from init and use the name of broker as an option when running/testing (#134)

  • Add a GH action file to check for broken links in the docs (#163)

  • Optimize requirements for testing and docs (#151)

  • Break requirements into base and optional for testing and dev (#124)

    • Minimize base requirements needed just for running the service.
  • Add link to example git repo into guide for building docs using actions (#81)

  • Add logging for run_in_background (#46)

  • Implement partition Key mechanism for producers (#16)

Bugs Squashed

  • Implement checks for npm installation and version (#176), thanks to @Sternakt

    • Closes #158 by checking if the npx is installed and more verbose error handling
  • Fix the helper.py link in CHANGELOG.md (#165)

  • fastkafka docs install_deps fails (#157)

    • Unexpected internal error: [Errno 2] No such file or directory: 'npx'
  • Broken links in docs (#141)

  • fastkafka run is not showing up in CLI docs (#132)

0.2.3

  • Fixed broken links on PyPi index page

0.2.2

New Features

  • Extract JDK and Kafka installation out of LocalKafkaBroker (#131)

  • PyYAML version relaxed (#119), thanks to @davorrunje

  • Replace docker based kafka with local (#68)

    • replace docker compose with a simple docker run (standard run_jupyter.sh should do)
    • replace all tests to use LocalKafkaBroker
    • update documentation

Bugs Squashed

  • Fix broken link for FastKafka docs in index notebook (#145)

  • Fix encoding issues when loading setup.py on windows OS (#135)

0.2.0

New Features

  • Replace kafka container with LocalKafkaBroker (#112)
      • Replace kafka container with LocalKafkaBroker in tests
  • Remove kafka container from tests environment
  • Fix failing tests

Bugs Squashed

  • Fix random failing in CI (#109)

0.1.3

  • Version updated in __init__.py

0.1.2

New Features

  • Git workflow action for publishing Kafka docs (#78)

Bugs Squashed

  • Include missing requirement (#110)
    • Typer is imported in this file but it is not included in settings.ini
    • Add aiohttp which is imported in this file
    • Add nbformat which is imported in _components/helpers.py
    • Add nbconvert which is imported in _components/helpers.py

0.1.1

Bugs Squashed

  • JDK install fails on Python 3.8 (#106)

0.1.0

Initial release

+ + + + \ No newline at end of file diff --git a/docs/CONTRIBUTING/index.html b/docs/CONTRIBUTING/index.html new file mode 100644 index 0000000..2127b15 --- /dev/null +++ b/docs/CONTRIBUTING/index.html @@ -0,0 +1,36 @@ + + + + + +Contributing to FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Contributing to FastKafka

First off, thanks for taking the time to contribute! ❤️

All types of contributions are encouraged and valued. See the Table of Contents for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉

And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:

  • Star the project
  • Tweet about it
  • Refer this project in your project's readme
  • Mention the project at local meetups and tell your friends/colleagues

Table of Contents

I Have a Question

If you want to ask a question, we assume that you have read the available Documentation.

Before you ask a question, it is best to search for existing Issues that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue.

If you then still feel the need to ask a question and need clarification, we recommend the following:

  • Contact us on Discord
  • Open an Issue
    • Provide as much context as you can about what you're running into

We will then take care of the issue as soon as possible.

I Want To Contribute

When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.

Reporting Bugs

Before Submitting a Bug Report

A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.

  • Make sure that you are using the latest version.
  • Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the documentation. If you are looking for support, you might want to check this section).
  • To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the bug tracker.
  • Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.
  • Collect information about the bug:
    • Stack trace (Traceback)
    • OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
    • Python version
    • Possibly your input and the output
    • Can you reliably reproduce the issue? And can you also reproduce it with older versions?

How Do I Submit a Good Bug Report?

We use GitHub issues to track bugs and errors. If you run into an issue with the project:

  • Open an Issue. (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)
  • Explain the behavior you would expect and the actual behavior.
  • Please provide as much context as possible and describe the reproduction steps that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
  • Provide the information you collected in the previous section.

Once it's filed:

  • The project team will label the issue accordingly.
  • A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as needs-repro. Bugs with the needs-repro tag will not be addressed until they are reproduced.
  • If the team is able to reproduce the issue, it will be marked needs-fix, as well as possibly other tags (such as critical), and the issue will be left to be implemented.

Suggesting Enhancements

This section guides you through submitting an enhancement suggestion for FastKafka, including completely new features and minor improvements to existing functionality. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.

Before Submitting an Enhancement

  • Make sure that you are using the latest version.
  • Read the documentation carefully and find out if the functionality is already covered, maybe by an individual configuration.
  • Perform a search to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
  • Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library.
  • If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on Discord

How Do I Submit a Good Enhancement Suggestion?

Enhancement suggestions are tracked as GitHub issues.

  • Use a clear and descriptive title for the issue to identify the suggestion.
  • Provide a step-by-step description of the suggested enhancement in as many details as possible.
  • Describe the current behavior and explain which behavior you expected to see instead and why. At this point you can also tell which alternatives do not work for you.
  • Explain why this enhancement would be useful to most FastKafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.

Your First Code Contribution

A great way to start contributing to FastKafka would be by solving an issue tagged with "good first issue". To find a list of issues that are tagged as "good first issue" and are suitable for newcomers, please visit the following link: Good first issues

These issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in Way of working and Before a PR, and help us make FastKafka even better!

If you have any questions or need further assistance, feel free to reach out to us. Happy coding!

Development

Prepare the dev environment

To start contributing to FastKafka, you first have to prepare the development environment.

Clone the FastKafka repository

To clone the repository, run the following command in the CLI:

git clone https://github.com/airtai/fastkafka.git

Optional: create a virtual python environment

To prevent library version clashes with your other projects, it is recommended that you create a virtual Python environment for your FastKafka project by running:

python3 -m venv fastkafka-env

And to activate your virtual environment run:

source fastkafka-env/bin/activate

To learn more about virtual environments, please have a look at official python documentation

Install FastKafka

To install FastKafka, navigate to the root directory of the cloned FastKafka project and run:

pip install -e ".[dev]"

Install JRE and Kafka toolkit

To be able to run tests and use all the functionalities of FastKafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:

  1. Use our fastkafka testing install-deps CLI command which will install JRE and Kafka toolkit for you in your .local folder +OR
  2. Install JRE and Kafka manually. +To do this, please refer to JDK and JRE installation guide and Apache Kafka quickstart

Install npm

To be able to run tests, you must have npm installed, as it is required for documentation generation. To install it, you have two options:

  1. Use our fastkafka docs install_deps CLI command which will install npm for you in your .local folder +OR
  2. Install npm manually. +To do this, please refer to NPM installation guide

Install docusaurus

To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of FastKafka project.

Check if everything works

After installing FastKafka and all the necessary dependencies, run nbdev_test in the root of FastKafka project. This will take a couple of minutes as it will run all the tests on FastKafka project. If everything is set up correctly, you will get a "Success." message in your terminal, otherwise please refer to previous steps.

Way of working

The development of FastKafka is done in Jupyter notebooks. Inside the nbs directory you will find all the source code of FastKafka, this is where you will implement your changes.

The testing, cleanup and exporting of the code is being handled by nbdev, please, before starting the work on FastKafka, get familiar with it by reading nbdev documentation.

The general philosophy you should follow when writing code for FastKafka is:

  • Function should be an atomic functionality, short and concise
    • Good rule of thumb: your function should be 5-10 lines long usually
  • If there are more than 2 params, enforce keywording using *
    • E.g.: def function(param1, *, param2, param3): ...
  • Define typing of arguments and return value
    • If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected
  • After the function cell, write test cells using the assert keyword
    • Whenever you implement something you should test that functionality immediately in the cells below
  • Add Google style python docstrings when function is implemented and tested

Before a PR

After you have implemented your changes you will want to open a pull request to merge those changes into our main branch. To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of FastKafka project):

  1. Format your notebooks: nbqa black nbs
  2. Close, shutdown, and clean the metadata from your notebooks: nbdev_clean
  3. Export your code: nbdev_export
  4. Run the tests: nbdev_test
  5. Test code typing: mypy fastkafka
  6. Test code safety with bandit: bandit -r fastkafka
  7. Test code safety with semgrep: semgrep --config auto -r fastkafka

When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everything is in order, we will approve your merge.

Attribution

This guide is based on the contributing-gen. Make your own!

+ + + + \ No newline at end of file diff --git a/docs/LICENSE/index.html b/docs/LICENSE/index.html new file mode 100644 index 0000000..ebe9524 --- /dev/null +++ b/docs/LICENSE/index.html @@ -0,0 +1,168 @@ + + + + + +LICENSE | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

LICENSE

Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

  1. Definitions.

    "License" shall mean the terms and conditions for use, reproduction, +and distribution as defined by Sections 1 through 9 of this document.

    "Licensor" shall mean the copyright owner or entity authorized by +the copyright owner that is granting the License.

    "Legal Entity" shall mean the union of the acting entity and all +other entities that control, are controlled by, or are under common +control with that entity. For the purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity.

    "You" (or "Your") shall mean an individual or Legal Entity +exercising permissions granted by this License.

    "Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation +source, and configuration files.

    "Object" form shall mean any form resulting from mechanical +transformation or translation of a Source form, including but +not limited to compiled object code, generated documentation, +and conversions to other media types.

    "Work" shall mean the work of authorship, whether in Source or +Object form, made available under the License, as indicated by a +copyright notice that is included in or attached to the work +(an example is provided in the Appendix below).

    "Derivative Works" shall mean any work, whether in Source or Object +form, that is based on (or derived from) the Work and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. For the purposes +of this License, Derivative Works shall not include works that remain +separable from, or merely link (or bind by name) to the interfaces of, +the Work and Derivative Works thereof.

    "Contribution" shall mean any work of authorship, including +the original version of the Work and any modifications or additions +to that Work or Derivative Works thereof, that is intentionally +submitted to Licensor for inclusion in the Work by the copyright owner +or by an individual or Legal Entity authorized to submit on behalf of +the copyright owner. For the purposes of this definition, "submitted" +means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, +and issue tracking systems that are managed by, or on behalf of, the +Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise +designated in writing by the copyright owner as "Not a Contribution."

    "Contributor" shall mean Licensor and any individual or Legal Entity +on behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work.

  2. Grant of Copyright License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the +Work and such Derivative Works in Source or Object form.

  3. Grant of Patent License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, +use, offer to sell, sell, import, and otherwise transfer the Work, +where such license applies only to those patent claims licensable +by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) +with the Work to which such Contribution(s) was submitted. If You +institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work +or a Contribution incorporated within the Work constitutes direct +or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate +as of the date such litigation is filed.

  4. Redistribution. You may reproduce and distribute copies of the +Work or Derivative Works thereof in any medium, with or without +modifications, and in Source or Object form, provided that You +meet the following conditions:

    (a) You must give any other recipients of the Work or +Derivative Works a copy of this License; and

    (b) You must cause any modified files to carry prominent notices +stating that You changed the files; and

    (c) You must retain, in the Source form of any Derivative Works +that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, +excluding those notices that do not pertain to any part of +the Derivative Works; and

    (d) If the Work includes a "NOTICE" text file as part of its +distribution, then any Derivative Works that You distribute must +include a readable copy of the attribution notices contained +within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one +of the following places: within a NOTICE text file distributed +as part of the Derivative Works; within the Source form or +documentation, if provided along with the Derivative Works; or, +within a display generated by the Derivative Works, if and +wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and +do not modify the License. You may add Your own attribution +notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided +that such additional attribution notices cannot be construed +as modifying the License.

    You may add Your own copyright statement to Your modifications and +may provide additional or different license terms and conditions +for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, +reproduction, and distribution of the Work otherwise complies with +the conditions stated in this License.

  5. Submission of Contributions. Unless You explicitly state otherwise, +any Contribution intentionally submitted for inclusion in the Work +by You to the Licensor shall be under the terms and conditions of +this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify +the terms of any separate license agreement you may have executed +with Licensor regarding such Contributions.

  6. Trademarks. This License does not grant permission to use the trade +names, trademarks, service marks, or product names of the Licensor, +except as required for reasonable and customary use in describing the +origin of the Work and reproducing the content of the NOTICE file.

  7. Disclaimer of Warranty. Unless required by applicable law or +agreed to in writing, Licensor provides the Work (and each +Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions +of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any +risks associated with Your exercise of permissions under this License.

  8. Limitation of Liability. In no event and under no legal theory, +whether in tort (including negligence), contract, or otherwise, +unless required by applicable law (such as deliberate and grossly +negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a +result of this License or out of the use or inability to use the +Work (including but not limited to damages for loss of goodwill, +work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses), even if such Contributor +has been advised of the possibility of such damages.

  9. Accepting Warranty or Additional Liability. While redistributing +the Work or Derivative Works thereof, You may choose to offer, +and charge a fee for, acceptance of support, warranty, indemnity, +or other liability obligations and/or rights consistent with this +License. However, in accepting such obligations, You may act only +on Your own behalf and on Your sole responsibility, not on behalf +of any other Contributor, and only if You agree to indemnify, +defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason +of your accepting any such warranty or additional liability.

    END OF TERMS AND CONDITIONS

    APPENDIX: How to apply the Apache License to your work.

    To apply the Apache License to your work, attach the following +boilerplate notice, with the fields enclosed by brackets "[]" +replaced with your own identifying information. (Don't include +the brackets!) The text should be enclosed in the appropriate +comment syntax for the file format. We also recommend that a +file or class name and description of purpose be included on the +same "printed page" as the copyright notice for easier +identification within third-party archives.

    Copyright [yyyy] [name of copyright owner]

    Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.

+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/EventMetadata/index.html b/docs/api/fastkafka/EventMetadata/index.html new file mode 100644 index 0000000..b4408fb --- /dev/null +++ b/docs/api/fastkafka/EventMetadata/index.html @@ -0,0 +1,32 @@ + + + + + +EventMetadata | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

EventMetadata

fastkafka.EventMetadata

View source

A class for encapsulating Kafka record metadata.

Parameters:

NameTypeDescriptionDefault
topicstrThe topic this record is received fromrequired
partitionintThe partition from which this record is receivedrequired
offsetintThe position of this record in the corresponding Kafka partitionrequired
timestampintThe timestamp of this recordrequired
timestamp_typeintThe timestamp type of this recordrequired
keyOptional[bytes]The key (or None if no key is specified)required
valueOptional[bytes]The valuerequired
serialized_key_sizeintThe size of the serialized, uncompressed key in bytesrequired
serialized_value_sizeintThe size of the serialized, uncompressed value in bytesrequired
headersSequence[Tuple[str, bytes]]The headersrequired

create_event_metadata

View source
@staticmethod
create_event_metadata(
record
)

Creates an instance of EventMetadata from a ConsumerRecord.

Parameters:

NameTypeDescriptionDefault
recordConsumerRecordThe Kafka ConsumerRecord.required

Returns:

TypeDescription
EventMetadataThe created EventMetadata instance.
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/KafkaEvent/index.html b/docs/api/fastkafka/KafkaEvent/index.html new file mode 100644 index 0000000..9553fcf --- /dev/null +++ b/docs/api/fastkafka/KafkaEvent/index.html @@ -0,0 +1,32 @@ + + + + + +KafkaEvent | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

KafkaEvent

fastkafka.KafkaEvent

View source

A generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel

Parameters:

NameTypeDescriptionDefault
messageBaseSubmodelThe message contained in the Kafka event, can be of type pydantic.BaseModel.required
keyOptional[bytes]The optional key used to identify the Kafka event.None
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/encoder/AvroBase/index.html b/docs/api/fastkafka/encoder/AvroBase/index.html new file mode 100644 index 0000000..cd53ef7 --- /dev/null +++ b/docs/api/fastkafka/encoder/AvroBase/index.html @@ -0,0 +1,38 @@ + + + + + +AvroBase | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

AvroBase

fastkafka.encoder.AvroBase

View source

This is base pydantic class that will add some methods

init

__init__(
__pydantic_self__, data
)

Create a new model by parsing and validating input data from keyword arguments.

Raises ValidationError if the input data cannot be parsed to form a valid model.

Uses __pydantic_self__ instead of the more common self for the first arg to +allow self as a field name.

avro_schema

View source
@classmethod
avro_schema(
by_alias=True, namespace=None
)

Returns the Avro schema for the Pydantic class.

Parameters:

NameTypeDescriptionDefault
by_aliasboolGenerate schemas using aliases defined. Defaults to True.True
namespaceOptional[str]Optional namespace string for schema generation.None

Returns:

TypeDescription
Dict[str, Any]The Avro schema for the model.

avro_schema_for_pydantic_class

View source
@classmethod
avro_schema_for_pydantic_class(
pydantic_model, by_alias=True, namespace=None
)

Returns the Avro schema for the given Pydantic class.

Parameters:

NameTypeDescriptionDefault
pydantic_modelType[pydantic.main.BaseModel]The Pydantic class.required
by_aliasboolGenerate schemas using aliases defined. Defaults to True.True
namespaceOptional[str]Optional namespace string for schema generation.None

Returns:

TypeDescription
Dict[str, Any]The Avro schema for the model.

avro_schema_for_pydantic_object

View source
@classmethod
avro_schema_for_pydantic_object(
pydantic_model, by_alias=True, namespace=None
)

Returns the Avro schema for the given Pydantic object.

Parameters:

NameTypeDescriptionDefault
pydantic_modelBaseModelThe Pydantic object.required
by_aliasboolGenerate schemas using aliases defined. Defaults to True.True
namespaceOptional[str]Optional namespace string for schema generation.None

Returns:

TypeDescription
Dict[str, Any]The Avro schema for the model.

copy

copy(
self, include=None, exclude=None, update=None, deep=False
)

Returns a copy of the model.

This method is now deprecated; use model_copy instead. If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)
data = {**data, **(update or {})}
copied = self.model_validate(data)

Parameters:

NameTypeDescriptionDefault
includeAbstractSetIntStrMappingIntStrAnyNone
excludeAbstractSetIntStrMappingIntStrAnyNone
update`Dict[str, Any]None`Optional dictionary of field-value pairs to override field valuesin the copied model.
deepboolIf True, the values of fields that are Pydantic models will be deep copied.False

Returns:

TypeDescription
ModelA copy of the model with included, excluded and updated fields as specified.

model_computed_fields

@property
model_computed_fields(
self
)

Get the computed fields of this model instance.

Returns:

TypeDescription
dict[str, ComputedFieldInfo]A dictionary of computed field names and their corresponding ComputedFieldInfo objects.

model_construct

@classmethod
model_construct(
_fields_set=None, values
)

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set since it adds all passed values

Parameters:

NameTypeDescriptionDefault
_fields_setset[str]NoneThe set of field names accepted for the Model instance.
valuesAnyTrusted or pre-validated data dictionary.required

Returns:

TypeDescription
ModelA new instance of the Model class with validated data.

model_copy

model_copy(
self, update=None, deep=False
)

Returns a copy of the model.

Parameters:

NameTypeDescriptionDefault
updatedict[str, Any]NoneValues to change/add in the new model. Note: the data is not validatedbefore creating the new model. You should trust this data.
deepboolSet to True to make a deep copy of the model.False

Returns:

TypeDescription
ModelNew model instance.

model_dump

model_dump(
self,
mode='python',
include=None,
exclude=None,
by_alias=False,
exclude_unset=False,
exclude_defaults=False,
exclude_none=False,
round_trip=False,
warnings=True,
)

Usage docs: https://docs.pydantic.dev/dev-v2/usage/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

NameTypeDescriptionDefault
modeLiteral['json', 'python']strThe mode in which to_python should run.If mode is 'json', the dictionary will only contain JSON serializable types.If mode is 'python', the dictionary may contain any Python objects.
includeIncExA list of fields to include in the output.None
excludeIncExA list of fields to exclude from the output.None
by_aliasboolWhether to use the field's alias in the dictionary key if defined.False
exclude_unsetboolWhether to exclude fields that are unset or None from the output.False
exclude_defaultsboolWhether to exclude fields that are set to their default value from the output.False
exclude_noneboolWhether to exclude fields that have a value of None from the output.False
round_tripboolWhether to enable serialization and deserialization round-trip support.False
warningsboolWhether to log warnings when invalid fields are encountered.True

Returns:

TypeDescription
dict[str, Any]A dictionary representation of the model.

model_dump_json

model_dump_json(
self,
indent=None,
include=None,
exclude=None,
by_alias=False,
exclude_unset=False,
exclude_defaults=False,
exclude_none=False,
round_trip=False,
warnings=True,
)

Usage docs: https://docs.pydantic.dev/dev-v2/usage/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

NameTypeDescriptionDefault
indentintNoneIndentation to use in the JSON output. If None is passed, the output will be compact.
includeIncExField(s) to include in the JSON output. Can take either a string or set of strings.None
excludeIncExField(s) to exclude from the JSON output. Can take either a string or set of strings.None
by_aliasboolWhether to serialize using field aliases.False
exclude_unsetboolWhether to exclude fields that have not been explicitly set.False
exclude_defaultsboolWhether to exclude fields that have the default value.False
exclude_noneboolWhether to exclude fields that have a value of None.False
round_tripboolWhether to use serialization/deserialization between JSON and class instance.False
warningsboolWhether to show any warnings that occurred during serialization.True

Returns:

TypeDescription
strA JSON string representation of the model.

model_extra

@property
model_extra(
self
)

Get extra fields set during validation.

Returns:

TypeDescription
`dict[str, Any]None`

model_fields_set

@property
model_fields_set(
self
)

Returns the set of fields that have been set on this model instance.

Returns:

TypeDescription
set[str]A set of strings representing the fields that have been set,i.e. that were not filled from defaults.

model_json_schema

@classmethod
model_json_schema(
by_alias=True,
ref_template='#/$defs/{model}',
schema_generator=<class 'pydantic.json_schema.GenerateJsonSchema'>,
mode='validation',
)

Generates a JSON schema for a model class.

To override the logic used to generate the JSON schema, you can create a subclass of GenerateJsonSchema with your desired modifications, then override this method on a custom base class and set the default value of schema_generator to be your subclass.

Parameters:

NameTypeDescriptionDefault
by_aliasboolWhether to use attribute aliases or not.True
ref_templatestrThe reference template.'#/$defs/{model}'
schema_generatortype[GenerateJsonSchema]The JSON schema generator.<class 'pydantic.json_schema.GenerateJsonSchema'>
modeJsonSchemaModeThe mode in which to generate the schema.'validation'

Returns:

TypeDescription
dict[str, Any]The JSON schema for the given model class.

model_parametrized_name

@classmethod
model_parametrized_name(
params
)

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

NameTypeDescriptionDefault
paramstuple[type[Any], ...]Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int],the value (str, int) would be passed to params.required

Returns:

TypeDescription
strString representing the new class where params are passed to cls as type variables.

Exceptions:

TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.

model_post_init

model_post_init(
self, _BaseModel__context
)

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

model_rebuild

@classmethod
model_rebuild(
force=False,
raise_errors=True,
_parent_namespace_depth=2,
_types_namespace=None,
)

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

NameTypeDescriptionDefault
forceboolWhether to force the rebuilding of the model schema, defaults to False.False
raise_errorsboolWhether to raise errors, defaults to True.True
_parent_namespace_depthintThe depth level of the parent namespace, defaults to 2.2
_types_namespacedict[str, Any]NoneThe types namespace, defaults to None.

Returns:

TypeDescription
`boolNone`

model_validate

@classmethod
model_validate(
obj, strict=None, from_attributes=None, context=None
)

Validate a pydantic model instance.

Parameters:

NameTypeDescriptionDefault
objAnyThe object to validate.required
strictboolNoneWhether to raise an exception on invalid fields.
from_attributesboolNoneWhether to extract data from object attributes.
contextdict[str, Any]NoneAdditional context to pass to the validator.

Returns:

TypeDescription
ModelThe validated model instance.

Exceptions:

TypeDescription
ValidationErrorIf the object could not be validated.

model_validate_json

@classmethod
model_validate_json(
json_data, strict=None, context=None
)

Validate the given JSON data against the Pydantic model.

Parameters:

NameTypeDescriptionDefault
json_datastrbytesbytearray
strictboolNoneWhether to enforce types strictly.
contextdict[str, Any]NoneExtra variables to pass to the validator.

Returns:

TypeDescription
ModelThe validated Pydantic model.

Exceptions:

TypeDescription
ValueErrorIf json_data is not a JSON string.
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/encoder/avro_decoder/index.html b/docs/api/fastkafka/encoder/avro_decoder/index.html new file mode 100644 index 0000000..dcdc757 --- /dev/null +++ b/docs/api/fastkafka/encoder/avro_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_decoder | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

avro_decoder

avro_decoder

View source
avro_decoder(
raw_msg, cls
)

Decoder to decode avro encoded messages to pydantic model instance

Parameters:

NameTypeDescriptionDefault
raw_msgbytesAvro encoded bytes message received from Kafka topicrequired
clsType[pydantic.main.BaseModel]Pydantic class; This pydantic class will be used to construct instance of same classrequired

Returns:

TypeDescription
AnyAn instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/encoder/avro_encoder/index.html b/docs/api/fastkafka/encoder/avro_encoder/index.html new file mode 100644 index 0000000..c8eb4b6 --- /dev/null +++ b/docs/api/fastkafka/encoder/avro_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_encoder | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

avro_encoder

avro_encoder

View source
avro_encoder(
msg
)

Encoder to encode pydantic instances to avro message

Parameters:

NameTypeDescriptionDefault
msgBaseModelAn instance of pydantic basemodelrequired

Returns:

TypeDescription
bytesA bytes message which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/encoder/avsc_to_pydantic/index.html b/docs/api/fastkafka/encoder/avsc_to_pydantic/index.html new file mode 100644 index 0000000..4299546 --- /dev/null +++ b/docs/api/fastkafka/encoder/avsc_to_pydantic/index.html @@ -0,0 +1,32 @@ + + + + + +avsc_to_pydantic | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

avsc_to_pydantic

avsc_to_pydantic

View source
avsc_to_pydantic(
schema
)

Generate pydantic model from given Avro Schema

Parameters:

NameTypeDescriptionDefault
schemaDict[str, Any]Avro schema in dictionary formatrequired

Returns:

TypeDescription
Type[pydantic.main.BaseModel]Pydantic model class built from given avro schema
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/encoder/json_decoder/index.html b/docs/api/fastkafka/encoder/json_decoder/index.html new file mode 100644 index 0000000..cced6b0 --- /dev/null +++ b/docs/api/fastkafka/encoder/json_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_decoder | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

json_decoder

json_decoder

View source
json_decoder(
raw_msg, cls
)

Decoder to decode json string in bytes to pydantic model instance

Parameters:

NameTypeDescriptionDefault
raw_msgbytesBytes message received from Kafka topicrequired
clsType[pydantic.main.BaseModel]Pydantic class; This pydantic class will be used to construct instance of same classrequired

Returns:

TypeDescription
AnyAn instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/encoder/json_encoder/index.html b/docs/api/fastkafka/encoder/json_encoder/index.html new file mode 100644 index 0000000..64a6eeb --- /dev/null +++ b/docs/api/fastkafka/encoder/json_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_encoder | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

json_encoder

json_encoder

View source
json_encoder(
msg
)

Encoder to encode pydantic instances to json string

Parameters:

NameTypeDescriptionDefault
msgBaseModelAn instance of pydantic basemodelrequired

Returns:

TypeDescription
bytesJson string in bytes which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/executors/DynamicTaskExecutor/index.html b/docs/api/fastkafka/executors/DynamicTaskExecutor/index.html new file mode 100644 index 0000000..a16e29d --- /dev/null +++ b/docs/api/fastkafka/executors/DynamicTaskExecutor/index.html @@ -0,0 +1,33 @@ + + + + + +DynamicTaskExecutor | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

DynamicTaskExecutor

fastkafka.executors.DynamicTaskExecutor

View source

A class that implements a dynamic task executor for processing consumer records.

The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality for running tasks in parallel using asyncio.Task.

init

View source
__init__(
self, throw_exceptions=False, max_buffer_size=100000, size=100000
)

Create an instance of DynamicTaskExecutor

Parameters:

NameTypeDescriptionDefault
throw_exceptionsboolFlag indicating whether exceptions should be thrown or logged. Defaults to False.False
max_buffer_sizeintMaximum buffer size for the memory object stream.Defaults to 100_000.100000
sizeintSize of the task pool. Defaults to 100_000.100000

run

View source
run(
self, is_shutting_down_f, generator, processor
)

Runs the dynamic task executor.

Parameters:

NameTypeDescriptionDefault
is_shutting_down_fCallable[[], bool]Function to check if the executor is shutting down.required
generatorCallable[[], Awaitable[aiokafka.structs.ConsumerRecord]]Generator function for retrieving consumer records.required
processorCallable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]Processor function for processing consumer records.required
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/executors/SequentialExecutor/index.html b/docs/api/fastkafka/executors/SequentialExecutor/index.html new file mode 100644 index 0000000..1ec56f7 --- /dev/null +++ b/docs/api/fastkafka/executors/SequentialExecutor/index.html @@ -0,0 +1,33 @@ + + + + + +SequentialExecutor | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

SequentialExecutor

fastkafka.executors.SequentialExecutor

View source

A class that implements a sequential executor for processing consumer records.

The SequentialExecutor class extends the StreamExecutor class and provides functionality for running processing tasks in sequence by awaiting their coroutines.

init

View source
__init__(
self, throw_exceptions=False, max_buffer_size=100000
)

Create an instance of SequentialExecutor

Parameters:

NameTypeDescriptionDefault
throw_exceptionsboolFlag indicating whether exceptions should be thrown or logged.Defaults to False.False
max_buffer_sizeintMaximum buffer size for the memory object stream.Defaults to 100_000.100000

run

View source
run(
self, is_shutting_down_f, generator, processor
)

Runs the sequential executor.

Parameters:

NameTypeDescriptionDefault
is_shutting_down_fCallable[[], bool]Function to check if the executor is shutting down.required
generatorCallable[[], Awaitable[aiokafka.structs.ConsumerRecord]]Generator function for retrieving consumer records.required
processorCallable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]Processor function for processing consumer records.required
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/index.html b/docs/api/fastkafka/index.html new file mode 100644 index 0000000..d58d1bc --- /dev/null +++ b/docs/api/fastkafka/index.html @@ -0,0 +1,39 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

FastKafka

init

View source
__init__(
self,
title=None,
description=None,
version=None,
contact=None,
kafka_brokers=None,
root_path=None,
lifespan=None,
bootstrap_servers_id='localhost',
loop=None,
client_id=None,
metadata_max_age_ms=300000,
request_timeout_ms=40000,
api_version='auto',
acks=<object object at 0x7ff10d5f9100>,
key_serializer=None,
value_serializer=None,
compression_type=None,
max_batch_size=16384,
partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>,
max_request_size=1048576,
linger_ms=0,
send_backoff_ms=100,
retry_backoff_ms=100,
security_protocol='PLAINTEXT',
ssl_context=None,
connections_max_idle_ms=540000,
enable_idempotence=False,
transactional_id=None,
transaction_timeout_ms=60000,
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
group_id=None,
key_deserializer=None,
value_deserializer=None,
fetch_max_wait_ms=500,
fetch_max_bytes=52428800,
fetch_min_bytes=1,
max_partition_fetch_bytes=1048576,
auto_offset_reset='latest',
enable_auto_commit=True,
auto_commit_interval_ms=5000,
check_crcs=True,
partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),
max_poll_interval_ms=300000,
rebalance_timeout_ms=None,
session_timeout_ms=10000,
heartbeat_interval_ms=3000,
consumer_timeout_ms=200,
max_poll_records=None,
exclude_internal_topics=True,
isolation_level='read_uncommitted',
)

Creates FastKafka application

Parameters:

NameTypeDescriptionDefault
titleOptional[str]optional title for the documentation. If None,the title will be set to empty stringNone
descriptionOptional[str]optional description for the documentation. IfNone, the description will be set to empty stringNone
versionOptional[str]optional version for the documentation. If None,the version will be set to empty stringNone
contactOptional[Dict[str, str]]optional contact for the documentation. If None, thecontact will be set to placeholder values:name='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'None
kafka_brokersOptional[Dict[str, Any]]dictionary describing kafka brokers used for settingthe bootstrap server when running the applicationa and forgenerating documentation. Defaults to { "localhost": { "url": "localhost", "description": "local kafka broker", "port": "9092", } }None
root_pathUnion[pathlib.Path, str, NoneType]path to where documentation will be createdNone
lifespanOptional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]]asynccontextmanager that is used for setting lifespan hooks.aenter is called before app start and aexit after app stop.The lifespan is called whe application is started as async contextmanager, e.g.:async with kafka_app...None
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: aiokafka-producer-# (appended with a unique numberper instance)None
key_serializerused to convert user-supplied keys to bytesIf not :data:None, called as f(key), should return:class:bytes.Default: :data:None.None
value_serializerused to convert user-supplied messagevalues to :class:bytes. If not :data:None, called asf(value), should return :class:bytes.Default: :data:None.None
acksone of 0, 1, all. The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common: 0: Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1. 1: The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* all: The broker leader will wait for the full set of in-sync replicas to acknowledge the record. This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to acks=1. If enable_idempotence is:data:True defaults to acks=all<object object at 0x7ff10d5f9100>
compression_typeThe compression type for all data generated bythe producer. Valid values are gzip, snappy, lz4, zstdor :data:None.Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). Default: :data:None.None
max_batch_sizeMaximum size of buffered data per partition.After this amount :meth:send coroutine will block until batch isdrained.Default: 1638416384
linger_msThe producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than linger_ms, producer will wait linger_ms - process_time.Default: 0 (i.e. no delay).0
partitionerCallable used to determine which partitioneach message is assigned to. Called (after key serialization):partitioner(key_bytes, all_partitions, available_partitions).The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:None, the message is delivered to a random partition(filtered to partitions with available leaders only, if possible).<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>
max_request_sizeThe maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576.1048576
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
request_timeout_msProduce request timeout in milliseconds.As it's sent as part of:class:~kafka.protocol.produce.ProduceRequest (it's a blockingcall), maximum waiting time can be up to 2 *request_timeout_ms.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
api_versionspecify which kafka API version to use.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For moreinformation see :ref:ssl_auth.Default: :data:NoneNone
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying :data:None willdisable idle checks. Default: 540000 (9 minutes).540000
enable_idempotenceWhen set to :data:True, the producer willensure that exactly one copy of each message is written in thestream. If :data:False, producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to all. If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:ValueError will be thrown.New in version 0.5.0.False
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid valuesare: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: :data:NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: :data:NoneNone
group_idname of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: NoneNone
key_deserializerAny callable that takes araw message key and returns a deserialized key.None
value_deserializerAny callable that takes araw message value and returns a deserialized value.None
fetch_min_bytesMinimum amount of data the server shouldreturn for a fetch request, otherwise wait up tofetch_max_wait_ms for more data to accumulate. Default: 1.1
fetch_max_bytesThe maximum amount of data the server shouldreturn for a fetch request. This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).52428800
fetch_max_wait_msThe maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500.500
max_partition_fetch_bytesThe maximum amount of dataper-partition the server will return. The maximum total memoryused for a request = #partitions * max_partition_fetch_bytes.This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. Default: 1048576.1048576
max_poll_recordsThe maximum number of records returned in asingle call to :meth:.getmany. Defaults None, no limit.None
auto_offset_resetA policy for resetting offsets on:exc:.OffsetOutOfRangeError errors: earliest will move to the oldestavailable message, latest will move to the most recent, andnone will raise an exception so you can handle this case.Default: latest.'latest'
enable_auto_commitIf true the consumer's offset will beperiodically committed in the background. Default: True.True
auto_commit_interval_msmilliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000.5000
check_crcsAutomatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: TrueTrue
partition_assignment_strategyList of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. Default: [:class:.RoundRobinPartitionAssignor](<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)
max_poll_interval_msMaximum allowed time between calls toconsume messages (e.g., :meth:.getmany). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See KIP-62_ for moreinformation. Default 300000300000
rebalance_timeout_msThe maximum time the server will wait for this consumer to rejoin the group in a case of rebalance. In the Java client this behaviour is bound to the max.poll.interval.ms configuration, but as aiokafka will rejoin the group in the background, we decouple this setting to allow finer tuning by users that use :class:.ConsumerRebalanceListener to delay rebalancing. Defaults to session_timeout_msNone
session_timeout_msClient group session and failure detection timeout. The consumer sends periodic heartbeats (heartbeat.interval.ms) to indicate its liveness to the broker. If no heartbeats are received by the broker for a group member within the session timeout, the broker will remove the consumer from the group and trigger a rebalance. The allowed range is configured with the broker configuration properties group.min.session.timeout.ms and group.max.session.timeout.ms. Default: 1000010000
heartbeat_interval_msThe expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than session_timeout_ms, but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. Default: 30003000
consumer_timeout_msmaximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200200
exclude_internal_topicsWhether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: TrueTrue
isolation_levelControls how to read messages writtentransactionally.If set to read_committed, :meth:.getmany will only returntransactional messages which have been committed.If set to read_uncommitted (the default), :meth:.getmany willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, inread_committed mode, :meth:.getmany will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, read_committed consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in read_committed the seek_to_end method willreturn the LSO. See method docs below. Default: read_uncommitted'read_uncommitted'
sasl_oauth_token_providerOAuthBearer token provider instance. (See :mod:kafka.oauth.abstract).Default: NoneNone

benchmark

View source
benchmark(
self, interval=1, sliding_window_size=None
)

Decorator to benchmark produces/consumes functions

Parameters:

NameTypeDescriptionDefault
intervalUnion[int, datetime.timedelta]Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. default: 1 - one second1
sliding_window_sizeOptional[int]The size of the sliding window to use to calculateaverage throughput. default: None - By default average throughput isnot calculatedNone

consumes

View source
consumes(
self,
topic=None,
decoder='json',
executor=None,
brokers=None,
prefix='on_',
description=None,
loop=None,
bootstrap_servers='localhost',
client_id='aiokafka-0.8.1',
group_id=None,
key_deserializer=None,
value_deserializer=None,
fetch_max_wait_ms=500,
fetch_max_bytes=52428800,
fetch_min_bytes=1,
max_partition_fetch_bytes=1048576,
request_timeout_ms=40000,
retry_backoff_ms=100,
auto_offset_reset='latest',
enable_auto_commit=True,
auto_commit_interval_ms=5000,
check_crcs=True,
metadata_max_age_ms=300000,
partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),
max_poll_interval_ms=300000,
rebalance_timeout_ms=None,
session_timeout_ms=10000,
heartbeat_interval_ms=3000,
consumer_timeout_ms=200,
max_poll_records=None,
ssl_context=None,
security_protocol='PLAINTEXT',
api_version='auto',
exclude_internal_topics=True,
connections_max_idle_ms=540000,
isolation_level='read_uncommitted',
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
)

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

NameTypeDescriptionDefault
topicOptional[str]Kafka topic that the consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefixNone
decoderUnion[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. It also accepts custom decoder function.'json'
executorUnion[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]Type of executor to choose for consuming tasks. Avaliable optionsare "SequentialExecutor" and "DynamicTaskExecutor". The default option is"SequentialExecutor" which will execute the consuming tasks sequentially.If the consuming tasks have high latency it is recommended to use"DynamicTaskExecutor" which will wrap the consuming functions into tasksand run them in on asyncio loop in background. This comes with a cost ofincreased overhead so use it only in cases when your consume functions havehigh latency such as database queries or some other type of networking.None
prefixstrPrefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: "on_". If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError'on_'
brokersUnion[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka.None
descriptionOptional[str]Optional description of the consuming function async docs.If not provided, consuming function doc attr will be used.None
bootstrap_serversa host[:port] string (or list ofhost[:port] strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. If no servers arespecified, will default to localhost:9092.'localhost'
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:~.consumer.group_coordinator.GroupCoordinatorfor logging with respect to consumer group administration. Default:aiokafka-{version}'aiokafka-0.8.1'
group_idname of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: NoneNone
key_deserializerAny callable that takes araw message key and returns a deserialized key.None
value_deserializerAny callable that takes araw message value and returns a deserialized value.None
fetch_min_bytesMinimum amount of data the server shouldreturn for a fetch request, otherwise wait up tofetch_max_wait_ms for more data to accumulate. Default: 1.1
fetch_max_bytesThe maximum amount of data the server shouldreturn for a fetch request. This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).52428800
fetch_max_wait_msThe maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500.500
max_partition_fetch_bytesThe maximum amount of dataper-partition the server will return. The maximum total memoryused for a request = #partitions * max_partition_fetch_bytes.This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. Default: 1048576.1048576
max_poll_recordsThe maximum number of records returned in asingle call to :meth:.getmany. Defaults None, no limit.None
request_timeout_msClient request timeout in milliseconds.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
auto_offset_resetA policy for resetting offsets on:exc:.OffsetOutOfRangeError errors: earliest will move to the oldestavailable message, latest will move to the most recent, andnone will raise an exception so you can handle this case.Default: latest.'latest'
enable_auto_commitIf true the consumer's offset will beperiodically committed in the background. Default: True.True
auto_commit_interval_msmilliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000.5000
check_crcsAutomatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: TrueTrue
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
partition_assignment_strategyList of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. Default: [:class:.RoundRobinPartitionAssignor](<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)
max_poll_interval_msMaximum allowed time between calls toconsume messages (e.g., :meth:.getmany). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See KIP-62_ for moreinformation. Default 300000300000
rebalance_timeout_msThe maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to max.poll.interval.ms configuration,but as aiokafka will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:.ConsumerRebalanceListener to delay rebalacing. Defaultsto session_timeout_msNone
session_timeout_msClient group session and failure detectiontimeout. The consumer sends periodic heartbeats(heartbeat.interval.ms) to indicate its liveness to the broker.If no hearts are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe broker configuration propertiesgroup.min.session.timeout.ms and group.max.session.timeout.ms.Default: 1000010000
heartbeat_interval_msThe expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than session_timeout_ms, but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. Default: 30003000
consumer_timeout_msmaximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200200
api_versionspecify which kafka API version to use.:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For more information see:ref:ssl_auth. Default: None.None
exclude_internal_topicsWhether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: TrueTrue
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying None willdisable idle checks. Default: 540000 (9 minutes).540000
isolation_levelControls how to read messages writtentransactionally.If set to read_committed, :meth:.getmany will only returntransactional messages which have been committed.If set to read_uncommitted (the default), :meth:.getmany willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, inread_committed mode, :meth:.getmany will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, read_committed consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in read_committed the seek_to_end method willreturn the LSO. See method docs below. Default: read_uncommitted'read_uncommitted'
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are:PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: NoneNone
sasl_oauth_token_providerOAuthBearer token provider instance. (See :mod:kafka.oauth.abstract).Default: NoneNone

Returns:

TypeDescription
Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]]: A function returning the same function

create_docs

View source
create_docs(
self
)

Create the asyncapi documentation based on the configured consumers and producers.

This function exports the asyncapi specification based on the configured consumers +and producers in the FastKafka instance. It generates the asyncapi documentation by +extracting the topics and callbacks from the consumers and producers.

Note: +The asyncapi documentation is saved to the location specified by the _asyncapi_path +attribute of the FastKafka instance.

create_mocks

View source
create_mocks(
self
)

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

fastapi_lifespan

View source
fastapi_lifespan(
self, kafka_broker_name
)

Method for managing the lifespan of a FastAPI application with a specific Kafka broker.

Parameters:

NameTypeDescriptionDefault
kafka_broker_namestrThe name of the Kafka broker to start FastKafkarequired

Returns:

TypeDescription
Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]Lifespan function to use for initializing FastAPI

get_topics

View source
get_topics(
self
)

Get all topics for both producing and consuming.

Returns:

TypeDescription
Iterable[str]A set of topics for both producing and consuming.

is_started

View source
@property
is_started(
self
)

Property indicating whether the FastKafka object is started.

The is_started property indicates if the FastKafka object is currently +in a started state. This implies that all background tasks, producers, +and consumers have been initiated, and the object is successfully connected +to the Kafka broker.

Returns:

TypeDescription
boolTrue if the object is started, False otherwise.

produces

View source
produces(
self,
topic=None,
encoder='json',
prefix='to_',
brokers=None,
description=None,
loop=None,
bootstrap_servers='localhost',
client_id=None,
metadata_max_age_ms=300000,
request_timeout_ms=40000,
api_version='auto',
acks=<object object at 0x7ff10d5f9100>,
key_serializer=None,
value_serializer=None,
compression_type=None,
max_batch_size=16384,
partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>,
max_request_size=1048576,
linger_ms=0,
send_backoff_ms=100,
retry_backoff_ms=100,
security_protocol='PLAINTEXT',
ssl_context=None,
connections_max_idle_ms=540000,
enable_idempotence=False,
transactional_id=None,
transaction_timeout_ms=60000,
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
)

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

NameTypeDescriptionDefault
topicOptional[str]Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix.None
encoderUnion[str, Callable[[pydantic.main.BaseModel], bytes]]Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. It also accepts custom encoder function.'json'
prefixstrPrefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: "to_". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError'to_'
brokersUnion[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka.None
descriptionOptional[str]Optional description of the producing function async docs.If not provided, producing function doc attr will be used.None
bootstrap_serversa host[:port] string or list ofhost[:port] strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list. It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to localhost:9092.'localhost'
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: aiokafka-producer-# (appended with a unique numberper instance)None
key_serializerused to convert user-supplied keys to bytesIf not :data:None, called as f(key), should return:class:bytes.Default: :data:None.None
value_serializerused to convert user-supplied messagevalues to :class:bytes. If not :data:None, called asf(value), should return :class:bytes.Default: :data:None.None
acksone of 0, 1, all. The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common: 0: Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1. 1: The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* all: The broker leader will wait for the full set of in-sync replicas to acknowledge the record. This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to acks=1. If enable_idempotence is:data:True defaults to acks=all<object object at 0x7ff10d5f9100>
compression_typeThe compression type for all data generated bythe producer. Valid values are gzip, snappy, lz4, zstdor :data:None.Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). Default: :data:None.None
max_batch_sizeMaximum size of buffered data per partition.After this amount :meth:send coroutine will block until batch isdrained.Default: 1638416384
linger_msThe producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than linger_ms, producer will wait linger_ms - process_time.Default: 0 (i.e. no delay).0
partitionerCallable used to determine which partitioneach message is assigned to. Called (after key serialization):partitioner(key_bytes, all_partitions, available_partitions).The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:None, the message is delivered to a random partition(filtered to partitions with available leaders only, if possible).<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>
max_request_sizeThe maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576.1048576
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
request_timeout_msProduce request timeout in milliseconds.As it's sent as part of:class:~kafka.protocol.produce.ProduceRequest (it's a blockingcall), maximum waiting time can be up to 2 *request_timeout_ms.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
api_versionspecify which kafka API version to use.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For moreinformation see :ref:ssl_auth.Default: :data:NoneNone
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying :data:None willdisable idle checks. Default: 540000 (9 minutes).540000
enable_idempotenceWhen set to :data:True, the producer willensure that exactly one copy of each message is written in thestream. If :data:False, producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to all. If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:ValueError will be thrown.New in version 0.5.0.False
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid valuesare: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: :data:NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: :data:NoneNone

Returns:

TypeDescription
Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]: A function returning the same function

Exceptions:

TypeDescription
ValueErrorwhen needed

run_in_background

View source
run_in_background(
self
)

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

TypeDescription
Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]A decorator function that takes a background task as an input and stores it to be run in the background.

set_kafka_broker

View source
set_kafka_broker(
self, kafka_broker_name
)

Sets the Kafka broker to start FastKafka with

Parameters:

NameTypeDescriptionDefault
kafka_broker_namestrThe name of the Kafka broker to start FastKafkarequired

Exceptions:

TypeDescription
ValueErrorIf the provided kafka_broker_name is not found in dictionary of kafka_brokers
+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/testing/ApacheKafkaBroker/index.html b/docs/api/fastkafka/testing/ApacheKafkaBroker/index.html new file mode 100644 index 0000000..124c628 --- /dev/null +++ b/docs/api/fastkafka/testing/ApacheKafkaBroker/index.html @@ -0,0 +1,34 @@ + + + + + +ApacheKafkaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

ApacheKafkaBroker

fastkafka.testing.ApacheKafkaBroker

View source

ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.

init

View source
__init__(
self,
topics=[],
retries=3,
apply_nest_asyncio=False,
zookeeper_port=2181,
listener_port=9092,
)

Initialises the ApacheKafkaBroker object

Parameters:

NameTypeDescriptionDefault
topicsIterable[str]List of topics to create after successful Kafka broker startup[]
retriesintNumber of retries to create kafka and zookeeper services using random3
apply_nest_asyncioboolset to True if running in notebookFalse
zookeeper_portintPort for clients (Kafka brokers) to connect2181
listener_portintPort on which the clients (producers and consumers) can connect9092

get_service_config_string

View source
get_service_config_string(
self, service, data_dir
)

Gets the configuration string for a service.

Parameters:

NameTypeDescriptionDefault
servicestrName of the service ("kafka" or "zookeeper").required
data_dirPathPath to the directory where the service will save data.required

Returns:

TypeDescription
strThe service configuration string.

is_started

View source
@property
is_started(
self
)

Property indicating whether the ApacheKafkaBroker object is started.

The is_started property indicates if the ApacheKafkaBroker object is currently +in a started state. This implies that Zookeeper and Kafka broker processes have +successfully started and are ready for handling events.

Returns:

TypeDescription
boolTrue if the object is started, False otherwise.

start

View source
start(
self
)

Starts a local Kafka broker and ZooKeeper instance synchronously.

Returns:

TypeDescription
strThe Kafka broker bootstrap server address in string format: host:port.

stop

View source
stop(
self
)

Stops a local kafka broker and zookeeper instance synchronously

+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/testing/LocalRedpandaBroker/index.html b/docs/api/fastkafka/testing/LocalRedpandaBroker/index.html new file mode 100644 index 0000000..5f804bb --- /dev/null +++ b/docs/api/fastkafka/testing/LocalRedpandaBroker/index.html @@ -0,0 +1,34 @@ + + + + + +LocalRedpandaBroker | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

LocalRedpandaBroker

fastkafka.testing.LocalRedpandaBroker

View source

LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.

init

View source
__init__(
self,
topics=[],
retries=3,
apply_nest_asyncio=False,
listener_port=9092,
tag='v23.1.2',
seastar_core=1,
memory='1G',
mode='dev-container',
default_log_level='debug',
kwargs,
)

Initialises the LocalRedpandaBroker object

Parameters:

NameTypeDescriptionDefault
topicsIterable[str]List of topics to create after successful redpanda broker startup[]
retriesintNumber of retries to create redpanda service3
apply_nest_asyncioboolset to True if running in notebookFalse
listener_portintPort on which the clients (producers and consumers) can connect9092
tagstrTag of Redpanda image to use to start container'v23.1.2'
seastar_coreintCore(s) to use by Seastar (the framework Redpanda uses under the hood)1
memorystrThe amount of memory to make available to Redpanda'1G'
modestrMode to use to load configuration properties in container'dev-container'
default_log_levelstrLog levels to use for Redpanda'debug'

get_service_config_string

View source
get_service_config_string(
self, service, data_dir
)

Generates a configuration for a service

Parameters:

NameTypeDescriptionDefault
data_dirPathPath to the directory where the zookeepeer instance will save datarequired
servicestr"redpanda", defines which service to get config string forrequired

is_started

View source
@property
is_started(
self
)

Property indicating whether the LocalRedpandaBroker object is started.

The is_started property indicates if the LocalRedpandaBroker object is currently +in a started state. This implies that Redpanda docker container has successfully +started and is ready for handling events.

Returns:

TypeDescription
boolTrue if the object is started, False otherwise.

start

View source
start(
self
)

Starts a local redpanda broker instance synchronously

Returns:

TypeDescription
strRedpanda broker bootstrap server address in string format: add:port

stop

View source
stop(
self
)

Stops a local redpanda broker instance synchronously

+ + + + \ No newline at end of file diff --git a/docs/api/fastkafka/testing/Tester/index.html b/docs/api/fastkafka/testing/Tester/index.html new file mode 100644 index 0000000..29bfeb8 --- /dev/null +++ b/docs/api/fastkafka/testing/Tester/index.html @@ -0,0 +1,39 @@ + + + + + +Tester | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Tester

init

View source
__init__(
self, app, use_in_memory_broker=True
)

Mirror-like object for testing a FastKafka application

Can be used as context manager

Parameters:

NameTypeDescriptionDefault
appUnion[fastkafka.FastKafka, List[fastkafka.FastKafka]]The FastKafka application to be tested.required
use_in_memory_brokerboolWhether to use an in-memory broker for testing or not.True

benchmark

View source
benchmark(
self, interval=1, sliding_window_size=None
)

Decorator to benchmark produces/consumes functions

Parameters:

NameTypeDescriptionDefault
intervalUnion[int, datetime.timedelta]Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. default: 1 - one second1
sliding_window_sizeOptional[int]The size of the sliding window to use to calculateaverage throughput. default: None - By default average throughput isnot calculatedNone

consumes

View source
consumes(
self,
topic=None,
decoder='json',
executor=None,
brokers=None,
prefix='on_',
description=None,
loop=None,
bootstrap_servers='localhost',
client_id='aiokafka-0.8.1',
group_id=None,
key_deserializer=None,
value_deserializer=None,
fetch_max_wait_ms=500,
fetch_max_bytes=52428800,
fetch_min_bytes=1,
max_partition_fetch_bytes=1048576,
request_timeout_ms=40000,
retry_backoff_ms=100,
auto_offset_reset='latest',
enable_auto_commit=True,
auto_commit_interval_ms=5000,
check_crcs=True,
metadata_max_age_ms=300000,
partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),
max_poll_interval_ms=300000,
rebalance_timeout_ms=None,
session_timeout_ms=10000,
heartbeat_interval_ms=3000,
consumer_timeout_ms=200,
max_poll_records=None,
ssl_context=None,
security_protocol='PLAINTEXT',
api_version='auto',
exclude_internal_topics=True,
connections_max_idle_ms=540000,
isolation_level='read_uncommitted',
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
)

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

NameTypeDescriptionDefault
topicOptional[str]Kafka topic that the consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefixNone
decoderUnion[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. It also accepts custom decoder function.'json'
executorUnion[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]Type of executor to choose for consuming tasks. Avaliable optionsare "SequentialExecutor" and "DynamicTaskExecutor". The default option is"SequentialExecutor" which will execute the consuming tasks sequentially.If the consuming tasks have high latency it is recommended to use"DynamicTaskExecutor" which will wrap the consuming functions into tasksand run them in on asyncio loop in background. This comes with a cost ofincreased overhead so use it only in cases when your consume functions havehigh latency such as database queries or some other type of networking.None
prefixstrPrefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: "on_". If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError'on_'
brokersUnion[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka.None
descriptionOptional[str]Optional description of the consuming function async docs.If not provided, consuming function doc attr will be used.None
bootstrap_serversa host[:port] string (or list ofhost[:port] strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. If no servers arespecified, will default to localhost:9092.'localhost'
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:~.consumer.group_coordinator.GroupCoordinatorfor logging with respect to consumer group administration. Default:aiokafka-{version}'aiokafka-0.8.1'
group_idname of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: NoneNone
key_deserializerAny callable that takes araw message key and returns a deserialized key.None
value_deserializerAny callable that takes araw message value and returns a deserialized value.None
fetch_min_bytesMinimum amount of data the server shouldreturn for a fetch request, otherwise wait up tofetch_max_wait_ms for more data to accumulate. Default: 1.1
fetch_max_bytesThe maximum amount of data the server shouldreturn for a fetch request. This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).52428800
fetch_max_wait_msThe maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500.500
max_partition_fetch_bytesThe maximum amount of dataper-partition the server will return. The maximum total memoryused for a request = #partitions * max_partition_fetch_bytes.This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. Default: 1048576.1048576
max_poll_recordsThe maximum number of records returned in asingle call to :meth:.getmany. Defaults None, no limit.None
request_timeout_msClient request timeout in milliseconds.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
auto_offset_resetA policy for resetting offsets on:exc:.OffsetOutOfRangeError errors: earliest will move to the oldestavailable message, latest will move to the most recent, andnone will raise an exception so you can handle this case.Default: latest.'latest'
enable_auto_commitIf true the consumer's offset will beperiodically committed in the background. Default: True.True
auto_commit_interval_msmilliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000.5000
check_crcsAutomatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: TrueTrue
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
partition_assignment_strategyList of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. Default: [:class:.RoundRobinPartitionAssignor](<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)
max_poll_interval_msMaximum allowed time between calls toconsume messages (e.g., :meth:.getmany). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See KIP-62_ for moreinformation. Default 300000300000
rebalance_timeout_msThe maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to max.poll.interval.ms configuration,but as aiokafka will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:.ConsumerRebalanceListener to delay rebalancing. Defaultsto session_timeout_msNone
session_timeout_msClient group session and failure detectiontimeout. The consumer sends periodic heartbeats(heartbeat.interval.ms) to indicate its liveness to the broker.If no heartbeats are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe broker configuration propertiesgroup.min.session.timeout.ms and group.max.session.timeout.ms.Default: 1000010000
heartbeat_interval_msThe expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than session_timeout_ms, but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. Default: 30003000
consumer_timeout_msmaximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200200
api_versionspecify which kafka API version to use.:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For more information see:ref:ssl_auth. Default: None.None
exclude_internal_topicsWhether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: TrueTrue
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying None willdisable idle checks. Default: 540000 (9 minutes).540000
isolation_levelControls how to read messages writtentransactionally.If set to read_committed, :meth:.getmany will only returntransactional messages which have been committed.If set to read_uncommitted (the default), :meth:.getmany willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, inread_committed mode, :meth:.getmany will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, read_committed consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in read_committed the seek_to_end method willreturn the LSO. See method docs below. Default: read_uncommitted'read_uncommitted'
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are:PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: NoneNone
sasl_oauth_token_providerOAuthBearer token provider instance. (See :mod:kafka.oauth.abstract).Default: NoneNone

Returns:

TypeDescription
Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]]: A function returning the same function

create_docs

View source
create_docs(
self
)

Create the asyncapi documentation based on the configured consumers and producers.

This function exports the asyncapi specification based on the configured consumers and producers in the FastKafka instance. It generates the asyncapi documentation by extracting the topics and callbacks from the consumers and producers.

Note: The asyncapi documentation is saved to the location specified by the _asyncapi_path attribute of the FastKafka instance.

create_mocks

View source
create_mocks(
self
)

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

fastapi_lifespan

View source
fastapi_lifespan(
self, kafka_broker_name
)

Method for managing the lifespan of a FastAPI application with a specific Kafka broker.

Parameters:

NameTypeDescriptionDefault
kafka_broker_namestrThe name of the Kafka broker to start FastKafkarequired

Returns:

TypeDescription
Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]Lifespan function to use for initializing FastAPI

get_topics

View source
get_topics(
self
)

Get all topics for both producing and consuming.

Returns:

TypeDescription
Iterable[str]A set of topics for both producing and consuming.

is_started

View source
@property
is_started(
self
)

Property indicating whether the FastKafka object is started.

The is_started property indicates if the FastKafka object is currently in a started state. This implies that all background tasks, producers, and consumers have been initiated, and the object is successfully connected to the Kafka broker.

Returns:

TypeDescription
boolTrue if the object is started, False otherwise.

produces

View source
produces(
self,
topic=None,
encoder='json',
prefix='to_',
brokers=None,
description=None,
loop=None,
bootstrap_servers='localhost',
client_id=None,
metadata_max_age_ms=300000,
request_timeout_ms=40000,
api_version='auto',
acks=<object object at 0x7ff10d5f9100>,
key_serializer=None,
value_serializer=None,
compression_type=None,
max_batch_size=16384,
partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>,
max_request_size=1048576,
linger_ms=0,
send_backoff_ms=100,
retry_backoff_ms=100,
security_protocol='PLAINTEXT',
ssl_context=None,
connections_max_idle_ms=540000,
enable_idempotence=False,
transactional_id=None,
transaction_timeout_ms=60000,
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
)

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

NameTypeDescriptionDefault
topicOptional[str]Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix.None
encoderUnion[str, Callable[[pydantic.main.BaseModel], bytes]]Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. It also accepts custom encoder function.'json'
prefixstrPrefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: "to_". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError'to_'
brokersUnion[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka.None
descriptionOptional[str]Optional description of the producing function async docs.If not provided, producing function doc attr will be used.None
bootstrap_serversa host[:port] string or list ofhost[:port] strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list. It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to localhost:9092.'localhost'
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: aiokafka-producer-# (appended with a unique numberper instance)None
key_serializerused to convert user-supplied keys to bytesIf not :data:None, called as f(key), should return:class:bytes.Default: :data:None.None
value_serializerused to convert user-supplied messagevalues to :class:bytes. If not :data:None, called asf(value), should return :class:bytes.Default: :data:None.None
acksone of 0, 1, all. The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common: 0: Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1. 1: The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* all: The broker leader will wait for the full set of in-sync replicas to acknowledge the record. This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to acks=1. If enable_idempotence is:data:True defaults to acks=all<object object at 0x7ff10d5f9100>
compression_typeThe compression type for all data generated bythe producer. Valid values are gzip, snappy, lz4, zstdor :data:None.Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). Default: :data:None.None
max_batch_sizeMaximum size of buffered data per partition.After this amount :meth:send coroutine will block until batch isdrained.Default: 1638416384
linger_msThe producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than linger_ms, producer will wait linger_ms - process_time.Default: 0 (i.e. no delay).0
partitionerCallable used to determine which partitioneach message is assigned to. Called (after key serialization):partitioner(key_bytes, all_partitions, available_partitions).The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:None, the message is delivered to a random partition(filtered to partitions with available leaders only, if possible).<kafka.partitioner.default.DefaultPartitioner object at 0x7ff10c16e2d0>
max_request_sizeThe maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576.1048576
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
request_timeout_msProduce request timeout in milliseconds.As it's sent as part of:class:~kafka.protocol.produce.ProduceRequest (it's a blockingcall), maximum waiting time can be up to 2 *request_timeout_ms.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
api_versionspecify which kafka API version to use.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For moreinformation see :ref:ssl_auth.Default: :data:NoneNone
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying :data:None willdisable idle checks. Default: 540000 (9 minutes).540000
enable_idempotenceWhen set to :data:True, the producer willensure that exactly one copy of each message is written in thestream. If :data:False, producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to all. If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:ValueError will be thrown.New in version 0.5.0.False
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid valuesare: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: :data:NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: :data:NoneNone

Returns:

TypeDescription
Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]: A function returning the same function

Exceptions:

TypeDescription
ValueErrorwhen needed

run_in_background

View source
run_in_background(
self
)

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

TypeDescription
Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]A decorator function that takes a background task as an input and stores it to be run in the background.

set_kafka_broker

View source
set_kafka_broker(
self, kafka_broker_name
)

Sets the Kafka broker to start FastKafka with

Parameters:

NameTypeDescriptionDefault
kafka_broker_namestrThe name of the Kafka broker to start FastKafkarequired

Exceptions:

TypeDescription
ValueErrorIf the provided kafka_broker_name is not found in dictionary of kafka_brokers
+ + + + \ No newline at end of file diff --git a/docs/cli/fastkafka/index.html b/docs/cli/fastkafka/index.html new file mode 100644 index 0000000..aa80b3b --- /dev/null +++ b/docs/cli/fastkafka/index.html @@ -0,0 +1,32 @@ + + + + + +fastkafka | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

fastkafka

Usage:

$ fastkafka [OPTIONS] COMMAND [ARGS]...

Options:

  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.

Commands:

  • docs: Commands for managing FastKafka app...
  • run: Runs Fast Kafka API application
  • testing: Commands for managing FastKafka testing

fastkafka docs

Commands for managing FastKafka app documentation

Usage:

$ fastkafka docs [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • generate: Generates documentation for a FastKafka...
  • install_deps: Installs dependencies for FastKafka...
  • serve: Generates and serves documentation for a...

fastkafka docs generate

Generates documentation for a FastKafka application

Usage:

$ fastkafka docs generate [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created; default is current directory
  • --help: Show this message and exit.

fastkafka docs install_deps

Installs dependencies for FastKafka documentation generation

Usage:

$ fastkafka docs install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.

fastkafka docs serve

Generates and serves documentation for a FastKafka application

Usage:

$ fastkafka docs serve [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created; default is current directory
  • --bind TEXT: Some info [default: 127.0.0.1]
  • --port INTEGER: Some info [default: 8000]
  • --help: Show this message and exit.

fastkafka run

Runs Fast Kafka API application

Usage:

$ fastkafka run [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --num-workers INTEGER: Number of FastKafka instances to run, defaults to number of CPU cores. [default: 64]
  • --kafka-broker TEXT: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [default: localhost]
  • --help: Show this message and exit.

fastkafka testing

Commands for managing FastKafka testing

Usage:

$ fastkafka testing [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • install_deps: Installs dependencies for FastKafka app...

fastkafka testing install_deps

Installs dependencies for FastKafka app testing

Usage:

$ fastkafka testing install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/cli/run_fastkafka_server_process/index.html b/docs/cli/run_fastkafka_server_process/index.html new file mode 100644 index 0000000..6214a52 --- /dev/null +++ b/docs/cli/run_fastkafka_server_process/index.html @@ -0,0 +1,32 @@ + + + + + +run_fastkafka_server_process | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

run_fastkafka_server_process

Usage:

$ run_fastkafka_server_process [OPTIONS] APP

Arguments:

  • APP: Input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --kafka-broker TEXT: Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [required]
  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_00_FastKafka_Demo/index.html b/docs/guides/Guide_00_FastKafka_Demo/index.html new file mode 100644 index 0000000..fc135b4 --- /dev/null +++ b/docs/guides/Guide_00_FastKafka_Demo/index.html @@ -0,0 +1,122 @@ + + + + + +FastKafka tutorial | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

FastKafka tutorial

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install it with pip as usual:

pip install fastkafka
try:
import fastkafka
except:
! pip install fastkafka

Running in Colab

You can start this interactive tutorial in Google Colab by clicking the +button below:

Open In Colab

Writing server code

Here is an example python script using FastKafka that takes data from a +Kafka topic, makes a prediction using a predictive model, and outputs +the prediction to another Kafka topic.

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can demonstrate the predictions using FastKafka. The following call downloads the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +generating the documentation only and it is not being checked by the +actual server.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
    # msg has already been parsed/validated into IrisInputData by the framework.
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
    # Map the model's integer class (0-2) onto the species name.
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

Testing the service

The service can be tested using the +Tester +instances which internally starts Kafka broker and zookeeper.

Before running tests, we have to install Java runtime and Apache Kafka +locally. To simplify the process, we provide the following convenience +command:

fastkafka testing install_deps
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)

# Start Tester app and create local Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created an Iris classification model and encapsulated it into our +fastkafka application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

Running the service

The service can be started using the builtin fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

# Registry holding the trained model between lifespan start-up and shutdown.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
    # Load the ML model
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
        X, y
    )
    yield
    # Clean up the ML models and release the resources
    ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field


class IrisInputData(BaseModel):
    """Input measurements of a single Iris flower, one field per feature (all in cm)."""

    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    """Output of the predictive model: the predicted Iris species name."""

    species: str = Field(..., example="setosa", description="Predicted species")


from fastkafka import FastKafka

kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)


@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    # to_predictions is a coroutine and must be awaited, otherwise the
    # prediction is never produced to the "predictions" topic.
    await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

To run the service, you will need a running Kafka broker on localhost as +specified in the kafka_brokers parameter above. We can start the Kafka +broker locally using the +ApacheKafkaBroker. +Notice that the same happens automatically in the +Tester +as shown above.

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.

'127.0.0.1:9092'

Then, we start the FastKafka service by running the following command in +the folder where the application.py file is located:

fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app

In the above command, we use --num-workers option to specify how many +workers to launch and we use --kafka-broker option to specify which +kafka broker configuration to use from earlier specified kafka_brokers

[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]
[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]
^C
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...

You need to interrupt running of the cell above by selecting +Runtime->Interrupt execution on the toolbar above.

Finally, we can stop the local Kafka Broker:

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

When running in Colab, we need to update Node.js first:

We need to install all dependencies for the generator using the +following command line:

fastkafka docs install_deps
[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.

. This will generate the asyncapi folder in relative path where all +your documentation will be saved. You can check out the content of it +with:

ls -l asyncapi
total 8
drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs
drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find a asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
^C
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
bootstrap_servers="localhost:9092",
)

Kafka_servers

The following documentation snippet are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippet are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages as defined as subclasses of BaseModel are +documented as well:

![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_01_Intro/index.html b/docs/guides/Guide_01_Intro/index.html new file mode 100644 index 0000000..e34e44a --- /dev/null +++ b/docs/guides/Guide_01_Intro/index.html @@ -0,0 +1,51 @@ + + + + + +Intro | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Intro

This tutorial will show you how to use FastKafkaAPI, step by +step.

The goal of FastKafkaAPI is to simplify the use of Apache Kafka in +Python inspired by FastAPI look and feel.

In this Intro tutorial we’ll go through the basic requirements to run the +demos presented in future steps.

Installing FastKafkaAPI

First step is to install FastKafkaAPI

$ pip install fastkafka

Preparing a Kafka broker

Next step is to prepare the Kafka environment, our consumers and +producers will need some channel of communication.

!!! info "Hey, your first info!"

If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. 

To go through the tutorial, we recommend that you use dockerized Kafka +brokers, if you have Docker and docker-compose installed the setup +should take you no time (if we exclude the container download times).

!!! warning "Listen! This is important."

To be able to setup this configuration you need to have Docker and docker-compose installed

See here for more info on <a href = \"https://docs.docker.com/\" target=\"_blank\">Docker</a> and <a href = \"https://docs.docker.com/compose/install/\" target=\"_blank\">docker compose</a>

To set up the recommended environment, first, create a new folder where +you want to save your demo files (e.g. fastkafka_demo). Inside the new +folder create a new YAML file named kafka_demo.yml and copy the +following configuration into it:

version: "3"
services:
  zookeeper:
    image: wurstmeister/zookeeper
    hostname: zookeeper
    container_name: zookeeper
    networks:
      - fastkafka-network
    ports:
      - "2181:2181"
      - "22:22"
      - "2888:2888"
      - "3888:3888"
  kafka:
    image: wurstmeister/kafka
    container_name: kafka
    ports:
      - "9093:9093"
    environment:
      HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d' ' -f 2"
      KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093
      KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093
      KAFKA_INTER_BROKER_LISTENER_NAME: INTER
      KAFKA_CREATE_TOPICS: "hello:1:1"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    depends_on:
      - zookeeper
    healthcheck:
      test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]
      interval: 5s
      timeout: 10s
      retries: 5
    networks:
      - fastkafka-network
networks:
  fastkafka-network:
    name: "fastkafka-network"

This configuration will start a single instance of Zookeeper, single +instance of Kafka broker and create a ‘hello’ topic (quite enough for a +start). To start the configuration, run:

$ docker-compose -f kafka_demo.yml up -d --wait

This will start the necessary containers and wait till they report that +they are Healthy. After the command finishes, you are good to go to try +out the FastKafkaAPI capabilities! 🎊

Running the code

After installing FastKafkaAPI and initialising the Kafka broker you can +proceed to the ‘First Steps’ part of the tutorial. There, you will write +your first Kafka client and producer apps, run them, and interact with +them.

You are highly encouraged to follow along the tutorials not just by +reading through them but by implementing the code examples in your own +environment. This will not only help you remember the use cases better +but also, hopefully, demonstrate to you the ease of use of this library.

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_02_First_Steps/index.html b/docs/guides/Guide_02_First_Steps/index.html new file mode 100644 index 0000000..93b546d --- /dev/null +++ b/docs/guides/Guide_02_First_Steps/index.html @@ -0,0 +1,49 @@ + + + + + +First Steps | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

First Steps

Creating a simple Kafka consumer app

For our first demo we will create the simplest possible Kafka consumer +and run it using ‘fastkafka run’ command.

The consumer will:

  1. Connect to the Kafka Broker we setup in the Intro guide

  2. Listen to the hello topic

  3. Write any message received from the hello topic to stdout

To create the consumer, first, create a file named

hello_kafka_consumer.py and copy the following code to it:

from os import environ

from fastkafka import FastKafka
from pydantic import BaseModel, Field

# Broker location comes from the environment so the same script works against
# any Kafka instance (see the "Kafka configuration" note below).
kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
    "localhost": {
        "description": "local development kafka",
        "url": kafka_server_url,
        "port": kafka_server_port
    }
}


class HelloKafkaMsg(BaseModel):
    """Schema of messages consumed from the 'hello' topic."""

    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )


kafka_app = FastKafka(
    kafka_brokers=kafka_brokers
)


@kafka_app.consumes()
async def on_hello(msg: HelloKafkaMsg):
    # flush=True so worker output appears in real time under `fastkafka run`
    print(f"Got data, msg={msg.msg}", flush=True)

!!! info "Kafka configuration"

This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

!!! warning "Remember to flush"

Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.

To run this consumer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app

After running the command, you should see something similar to the output +below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})
[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.

Now you can interact with your consumer, by sending the messages to the +subscribed ‘hello’ topic, don’t worry, we will cover this in the next +step of this guide.

Sending first message to your consumer

After we have created and run our first consumer, we should send a +message to it, to make sure it is working properly.

If you are using the Kafka setup as described in the Intro guide, you +can follow the steps listed here to send a message to the hello topic.

First, connect to your running kafka broker by running:

docker exec -it kafka /bin/bash

Then, when connected to the container, run:

kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello

This will open an interactive connection to the hello topic, now you can +write your messages to the topic and they will be consumed by our +consumer.

In the shell, type:

{"msg":"hello"}

and press enter. This will send a hello message to the topic which will +be read by our running consumer and outputted to stdout.

Check the output of your consumer (terminal where you ran the ‘fastkafka +run’ command) and confirm that your consumer has read the Kafka message. +You should see something like this:

Got data, msg=hello

Creating a hello Kafka producer

Consuming messages is only a part of this Library functionality, the +other big part is producing the messages. So, let’s create our first +kafka producer which will send it’s greetings to our consumer +periodically.

The producer will:

  1. Connect to the Kafka Broker we setup in the Intro guide
  2. Connect to the hello topic
  3. Periodically send a message to the hello world topic

To create the producer, first, create a file named

hello_kafka_producer.py and copy the following code to it:

from os import environ

import asyncio
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

# Broker location comes from the environment so the same script works against
# any Kafka instance (see the "Kafka configuration" note below).
kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
    "localhost": {
        "description": "local development kafka",
        "url": kafka_server_url,
        "port": kafka_server_port
    }
}


class HelloKafkaMsg(BaseModel):
    """Schema of messages produced to the 'hello' topic."""

    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )


kafka_app = FastKafka(
    kafka_brokers=kafka_brokers
)

logger = get_logger(__name__)


@kafka_app.produces()
async def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:
    # The returned message is encoded and produced to 'hello' by the framework.
    logger.info(f"Producing: {msg}")
    return msg


@kafka_app.run_in_background()
async def hello_every_second():
    # Background task: produce one greeting per second for as long as the app runs.
    while True:
        await to_hello(HelloKafkaMsg(msg="hello"))
        await asyncio.sleep(1)

!!! info "Kafka configuration"

This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

To run this producer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app

After running the command, you should see something similar to the output below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.

Now, while the producer is running, it will send a HelloKafkaMsg every +second to the hello kafka topic. If your consumer is still running, you +should see the messages appear in its log.

Recap

In this guide we have:

  1. Created a simple Kafka consumer using FastKafka
  2. Sent a message to our consumer through Kafka
  3. Created a simple Kafka producer using FastKafka
+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_03_Authentication/index.html b/docs/guides/Guide_03_Authentication/index.html new file mode 100644 index 0000000..fb50345 --- /dev/null +++ b/docs/guides/Guide_03_Authentication/index.html @@ -0,0 +1,37 @@ + + + + + +Authentication | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Authentication

TLS Authentication

sasl_mechanism (str) – Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN, +GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN

sasl_plain_username (str) – username for SASL PLAIN authentication. +Default: None

sasl_plain_password (str) – password for SASL PLAIN authentication. +Default: None

sasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token +provider instance. (See kafka.oauth.abstract). Default: None

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_04_Github_Actions_Workflow/index.html b/docs/guides/Guide_04_Github_Actions_Workflow/index.html new file mode 100644 index 0000000..59805b9 --- /dev/null +++ b/docs/guides/Guide_04_Github_Actions_Workflow/index.html @@ -0,0 +1,42 @@ + + + + + +Deploy FastKafka docs to GitHub Pages | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Deploy FastKafka docs to GitHub Pages

Getting started

Add your workflow file .github/workflows/fastkafka_docs_deploy.yml and +push it to your remote default branch.

Here is an example workflow:

name: Deploy FastKafka Generated Documentation to GitHub Pages

on:
push:
branches: [ "main", "master" ]
workflow_dispatch:

jobs:
deploy:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

Options

Set app location

Input in the form of path:app, where path is the path to a Python +file and app is an object of type +FastKafka:

- name: Deploy
uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

In the above example, +FastKafka +app is named as kafka_app and it is available in the application +submodule of the test_fastkafka module.

Example Repository

A FastKafka-based library that uses the above-mentioned workflow actions to publish FastKafka docs to GitHub Pages can be found here.

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_05_Lifespan_Handler/index.html b/docs/guides/Guide_05_Lifespan_Handler/index.html new file mode 100644 index 0000000..aa508a8 --- /dev/null +++ b/docs/guides/Guide_05_Lifespan_Handler/index.html @@ -0,0 +1,75 @@ + + + + + +Lifespan Events | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Lifespan Events

Did you know that you can define some special code that runs before and +after your Kafka application? This code will be executed just once, but +it covers the whole lifespan of your app! 🚀

Let’s break it down:

You can define logic (code) that should be executed before the +application starts up. This is like a warm-up for your app, getting it +ready to consume and produce messages.

Similarly, you can define logic (code) that should be executed when the +application is shutting down. This is like a cool-down for your app, +making sure everything is properly closed and cleaned up.

By executing code before consuming and after producing, you cover the +entire lifecycle of your application 🎉

This is super handy for setting up shared resources that are needed +across consumers and producers, like a database connection pool or a +machine learning model. And the best part? You can clean up these +resources when the app is shutting down!

So let’s give it a try and see how it can make your Kafka app even more awesome! 💪

Lifespan example - Iris prediction model

Let’s dive into an example to see how you can leverage the lifecycle +handler to solve a common use case. Imagine that you have some machine +learning models that need to consume incoming messages and produce +response/prediction messages. These models are shared among consumers +and producers, which means you don’t want to load them for every +message.

Here’s where the lifecycle handler comes to the rescue! By loading the +model before the messages are consumed and produced, but only right +before the application starts receiving messages, you can ensure that +the model is ready to use without compromising the performance of your +tests. In the upcoming sections, we’ll walk you through how to +initialize an Iris species prediction model and use it in your developed +application.

Lifespan

You can define this startup and shutdown logic using the lifespan +parameter of the FastKafka app, and an async context manager.

Let’s start with an example and then see it in detail.

We create an async function lifespan() with yield like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from fastkafka import FastKafka

ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

The first thing to notice, is that we are defining an async function +with yield. This is very similar to Dependencies with yield.

The first part of the function, before the yield, will be executed +before the application starts. And the part after the yield will +be executed after the application has finished.

This lifespan will create an iris_prediction model on application +startup and cleanup the references after the app is shutdown.

The lifespan will be passed a KafkaApp reference on startup of your application, which you can use to reference your application on startup.

For demonstration sake, we also added prints so that when running the +app we can see that our lifespan was called.

Async context manager

Context managers can be used in with blocks, our lifespan, for example +could be used like this:

ml_models = {}
async with lifespan(None):
print(ml_models)

When you create a context manager or an async context manager, what it +does is that, before entering the with block, it will execute the code +before the yield, and after exiting the with block, it will execute +the code after the yield.

If you want to learn more about context managers and contextlib +decorators, please visit Python official +docs

App demo

FastKafka app

Let’s now create our application using the created lifespan handler.

Notice how we passed our lifespan handler to the app when constructing it through the lifespan argument.

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Data modeling

Let’s model the Iris data for our app:

from pydantic import BaseModel, Field, NonNegativeFloat

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Consumers and producers

Let’s create a consumer and producer for our app that will generate predictions from input iris data.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Final app

The final app looks like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")
ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Running the app

Now we can run the app with your custom lifespan handler. Copy the code +above in lifespan_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app

When you run the app, you should see a similar output to the one below:

Recap

In this guide we have defined a lifespan handler and passed it to our FastKafka app.

Some important points are:

  1. Lifespan handler is implemented as +AsyncContextManager
  2. Code before yield in lifespan will be executed before +application startup
  3. Code after yield in lifespan will be executed after +application shutdown
  4. You can pass your lifespan handler to FastKafka app on +initialisation by passing a lifespan argument
+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_06_Benchmarking_FastKafka/index.html b/docs/guides/Guide_06_Benchmarking_FastKafka/index.html new file mode 100644 index 0000000..dbbb3f4 --- /dev/null +++ b/docs/guides/Guide_06_Benchmarking_FastKafka/index.html @@ -0,0 +1,80 @@ + + + + + +Benchmarking FastKafka app | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Benchmarking FastKafka app

Prerequisites

To benchmark a +FastKafka +project, you will need the following:

  1. A library built with +FastKafka.
  2. A running Kafka instance to benchmark the FastKafka application +against.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

FastKafka +has a decorator for benchmarking which is appropriately called as +benchmark. Let’s edit our application.py file and add the +benchmark decorator to the consumes method.

# content of the "application.py" file with benchmark

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
@kafka_app.benchmark(interval=1, sliding_window_size=5)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Here we are conducting a benchmark of a function that consumes data from +the input_data topic with an interval of 1 second and a sliding window +size of 5.

This benchmark method uses the interval parameter to calculate the +results over a specific time period, and the sliding_window_size +parameter to determine the maximum number of results to use in +calculating the average throughput and standard deviation.

This benchmark is important to ensure that the function is performing +optimally and to identify any areas for improvement.

Starting Kafka

If you already have a Kafka running somewhere, then you can skip this +step.

Please keep in mind that your benchmarking results may be affected by +bottlenecks such as network, CPU cores in the Kafka machine, or even the +Kafka configuration itself.

Installing Java and Kafka

We need a working Kafka instance to benchmark our FastKafka app, and to run Kafka we need Java. Thankfully, FastKafka comes with a CLI to install both Java and Kafka on our machine.

So, let’s install Java and Kafka by executing the following command.

fastkafka testing install_deps

The above command will extract Kafka scripts at the location +“\$HOME/.local/kafka_2.13-3.3.2" on your machine.

Creating configuration for Zookeeper and Kafka

Now we need to start Zookeeper and Kafka separately, and to start +them we need zookeeper.properties and kafka.properties files.

Let’s create a folder inside the folder where Kafka scripts were +extracted and change directory into it.

mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir

Let’s create a file called zookeeper.properties and write the +following content to the file:

dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper
clientPort=2181
maxClientCnxns=0

Similarly, let’s create a file called kafka.properties and write the +following content to the file:

broker.id=0
listeners=PLAINTEXT://:9092

num.network.threads=3
num.io.threads=8
socket.send.buffer.bytes=102400
socket.receive.buffer.bytes=102400
socket.request.max.bytes=104857600

num.partitions=1
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1

log.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs
log.flush.interval.messages=10000
log.flush.interval.ms=1000
log.retention.hours=168
log.retention.bytes=1073741824
log.segment.bytes=1073741824
log.retention.check.interval.ms=300000

zookeeper.connect=localhost:2181
zookeeper.connection.timeout.ms=18000

Starting Zookeeper and Kafka

We need two different terminals to run Zookeeper in one and Kafka in +another. Let’s open a new terminal and run the following commands to +start Zookeeper:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./zookeeper-server-start.sh ../data_dir/zookeeper.properties

Once Zookeeper is up and running, open a new terminal and execute the following commands to start Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-server-start.sh ../data_dir/kafka.properties

Now we have both Zookeeper and Kafka up and running.

Creating topics in Kafka

In a new terminal, please execute the following command to create +necessary topics in Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092
./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092

Populating topics with dummy data

To benchmark our +FastKafka +app, we need some data in Kafka topics.

In the same terminal, let’s create some dummy data:

yes '{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}' | head -n 1000000 > /tmp/test_data

This command will create a file called test_data in the tmp folder +with one million rows of text. This will act as dummy data to populate +the input_data topic.

Let’s populate the created topic input_data with the dummy data which +we created above:

./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data

Now our topic input_data has one million records/messages in it. If +you want more messages in topic, you can simply execute the above +command again and again.

Benchmarking FastKafka

Once Zookeeper and Kafka are ready, benchmarking +FastKafka +app is as simple as running the fastkafka run command:

fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app

This command will start the +FastKafka +app and begin consuming messages from Kafka, which we spun up earlier. +Additionally, the same command will output all of the benchmark +throughputs based on the interval and sliding_window_size values.

The output for the fastkafka run command is:

[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh
ost:9092', 'max_poll_records': 100}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition
=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)

Based on the output, when using 1 worker, our +FastKafka +app achieved a throughput of 93k messages per second and an +average throughput of 93k messages per second.

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html b/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html new file mode 100644 index 0000000..a0364aa --- /dev/null +++ b/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html @@ -0,0 +1,150 @@ + + + + + +Encoding and Decoding Kafka Messages with FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Encoding and Decoding Kafka Messages with FastKafka

Prerequisites

  1. A basic knowledge of +FastKafka +is needed to proceed with this guide. If you are not familiar with +FastKafka, +please go through the tutorial first.
  2. FastKafka +with its dependencies installed is needed. Please install +FastKafka +using the command - pip install fastkafka

Ways to Encode and Decode Messages with FastKafka

In Python, by default, we send Kafka messages as bytes. Even if our message is a string, we convert it to bytes and then send it to the Kafka topic. Similarly, while consuming messages, we consume them as bytes and then convert them to strings.

In FastKafka, we specify message schema using Pydantic models as +mentioned in tutorial:

# Define Pydantic models for Kafka messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Then, we send and receive messages as instances of Pydantic models which +we defined. So, FastKafka needs a way to encode/decode to these Pydantic +model messages to bytes in order to send/receive messages to/from Kafka +topics.

The @consumes and @produces methods of FastKafka accept a parameter +called decoder/encoder to decode/encode Kafka messages. FastKafka +provides three ways to encode and decode messages:

  1. json - This is the default encoder/decoder option in FastKafka. +While producing, this option converts our instance of Pydantic model +messages to a JSON string and then converts it to bytes before +sending it to the topic. While consuming, it converts bytes to a +JSON string and then constructs an instance of Pydantic model from +the JSON string.
  2. avro - This option uses Avro encoding/decoding to convert instances +of Pydantic model messages to bytes while producing, and while +consuming, it constructs an instance of Pydantic model from bytes.
  3. custom encoder/decoder - If you are not happy with the json or avro +encoder/decoder options, you can write your own encoder/decoder +functions and use them to encode/decode Pydantic messages.

1. Json encoder and decoder

The default option in FastKafka is json encoder/decoder. This option, +while producing, converts our instance of pydantic model messages to +json string and then converts to bytes before sending it to the topics. +While consuming it converts bytes to json string and then constructs +instance of pydantic model from json string.

We can use the application from tutorial as +is, and it will use the json encoder/decoder by default. But, for +clarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder +parameter:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

# Registry holding the trained model between start-up and shutdown.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
    # Train the iris classifier once when the app starts.
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
        X, y
    )
    yield
    # Clean up the ML models and release the resources.
    ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field


class IrisInputData(BaseModel):
    # The four iris flower measurements; NonNegativeFloat enforces values >= 0.
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    # Name of the predicted iris species.
    species: str = Field(..., example="setosa", description="Predicted species")


from fastkafka import FastKafka

kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)


@kafka_app.consumes(topic="input_data", decoder="json")
async def on_input_data(msg: IrisInputData):
    # Predict the species class (0-2) from the four measurements.
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="json")
async def to_predictions(species_class: int) -> IrisPrediction:
    # Map the numeric class back to its species name and publish it.
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

In the above code, the @kafka_app.consumes decorator sets up a consumer for the "input_data" topic, using the 'json' decoder to convert the message payload to an instance of IrisInputData. The @kafka_app.produces decorator sets up a producer for the "predictions" topic, using the 'json' encoder to convert the instance of IrisPrediction to the message payload.

2. Avro encoder and decoder

What is Avro?

Avro is a row-oriented remote procedure call and data serialization +framework developed within Apache’s Hadoop project. It uses JSON for +defining data types and protocols, and serializes data in a compact +binary format. To learn more about the Apache Avro, please check out the +docs.

Installing FastKafka with Avro dependencies

FastKafka +with dependencies for Apache Avro installed is needed to use avro +encoder/decoder. Please install +FastKafka +with Avro support using the command - pip install fastkafka[avro]

Defining Avro Schema Using Pydantic Models

By default, you can use Pydantic model to define your message schemas. +FastKafka internally takes care of encoding and decoding avro messages, +based on the Pydantic models.

So, similar to the tutorial, the message schema will +remain as it is.

# Define Pydantic models for Avro messages
from pydantic import BaseModel, NonNegativeFloat, Field


class IrisInputData(BaseModel):
    # The four iris measurements; NonNegativeFloat enforces values >= 0.
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    # Name of the predicted iris species.
    species: str = Field(..., example="setosa", description="Predicted species")

No need to change anything to support avro. You can use existing +Pydantic models as is.

Reusing existing avro schema

If you are using some other library to send and receive avro encoded +messages, it is highly likely that you already have an Avro schema +defined.

Building pydantic models from avro schema dictionary

Let’s modify the above example and let’s assume we have schemas already +for IrisInputData and IrisPrediction which will look like below:

# Avro record schema for the input message: four required double fields.
iris_input_data_schema = {
    "type": "record",
    "namespace": "IrisInputData",
    "name": "IrisInputData",
    "fields": [
        {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
        {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
        {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
        {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
    ],
}

# Avro record schema for the prediction message: a single string field.
iris_prediction_schema = {
    "type": "record",
    "namespace": "IrisPrediction",
    "name": "IrisPrediction",
    "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}

We can easily construct pydantic models from avro schema using +avsc_to_pydantic +function which is included as part of +FastKafka +itself.

from fastkafka.encoder import avsc_to_pydantic

# Build pydantic model classes directly from the avro schema dictionaries
# and print their fields for inspection.
IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.model_fields)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.model_fields)

The above code will convert avro schema to pydantic models and will +print pydantic models’ fields. The output of the above is:

{'sepal_length': ModelField(name='sepal_length', type=float, required=True),
'sepal_width': ModelField(name='sepal_width', type=float, required=True),
'petal_length': ModelField(name='petal_length', type=float, required=True),
'petal_width': ModelField(name='petal_width', type=float, required=True)}

{'species': ModelField(name='species', type=str, required=True)}

This is exactly same as manually defining the pydantic models ourselves. +You don’t have to worry about not making any mistakes while converting +avro schema to pydantic models manually. You can easily and +automatically accomplish it by using +avsc_to_pydantic +function as demonstrated above.

Building pydantic models from .avsc file

Not all cases will have avro schema conveniently defined as a python +dictionary. You may have it stored as the proprietary .avsc files in +filesystem. Let’s see how to convert those .avsc files to pydantic +models.

Let’s assume our avro files are stored in files called +iris_input_data_schema.avsc and iris_prediction_schema.avsc. In that +case, following code converts the schema to pydantic models:

import json
from fastkafka.encoder import avsc_to_pydantic


# Read the avro schema definitions from their .avsc files on disk.
with open("iris_input_data_schema.avsc", "rb") as f:
    iris_input_data_schema = json.load(f)

with open("iris_prediction_schema.avsc", "rb") as f:
    iris_prediction_schema = json.load(f)


# Convert each schema dictionary into a pydantic model class.
IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.model_fields)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.model_fields)

Consume/Produce avro messages with FastKafka

FastKafka provides @consumes and @produces methods to consume messages from and produce messages to a Kafka topic. This is explained in the tutorial.

The @consumes and @produces methods accept a parameter called decoder/encoder to decode/encode avro messages.

# NOTE: the original snippet had the parameters swapped (encoder on @consumes,
# decoder on @produces). Consumers decode incoming bytes, producers encode
# outgoing messages — matching the assembled example later on this page.
@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
    # Predict the species class (0-2) from the four measurements.
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
    # Map the numeric class back to its species name and publish it.
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

In the above example, in @consumes and @produces methods, we +explicitly instruct FastKafka to decode and encode messages using +the avro decoder/encoder instead of the default json +decoder/encoder.

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use avro to decode and +encode messages:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

# Registry holding the trained model between start-up and shutdown.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
    # Train the iris classifier once when the app starts.
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
        X, y
    )
    yield
    # Clean up the ML models and release the resources.
    ml_models.clear()


# Avro record schemas for the input and prediction messages.
iris_input_data_schema = {
    "type": "record",
    "namespace": "IrisInputData",
    "name": "IrisInputData",
    "fields": [
        {"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
        {"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
        {"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
        {"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
    ],
}
iris_prediction_schema = {
    "type": "record",
    "namespace": "IrisPrediction",
    "name": "IrisPrediction",
    "fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}
# Or load schema from avsc files

from fastkafka.encoder import avsc_to_pydantic

# Convert the avro schemas into pydantic model classes.
IrisInputData = avsc_to_pydantic(iris_input_data_schema)
IrisPrediction = avsc_to_pydantic(iris_prediction_schema)


from fastkafka import FastKafka

kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)


@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
    # Predict the species class (0-2) from the four measurements.
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
    # Map the numeric class back to its species name and publish it.
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

The above code is a sample implementation of using FastKafka to consume +and produce Avro-encoded messages from/to a Kafka topic. The code +defines two Avro schemas for the input data and the prediction result. +It then uses the +avsc_to_pydantic +function from the FastKafka library to convert the Avro schema into +Pydantic models, which will be used to decode and encode Avro messages.

The FastKafka class is then instantiated with the broker details, and two functions decorated with @kafka_app.consumes and @kafka_app.produces are defined to consume messages from the "input_data" topic and produce messages to the "predictions" topic, respectively. The functions use the decoder="avro" and encoder="avro" parameters to decode and encode the Avro messages.

In summary, the above code demonstrates a straightforward way to use +Avro-encoded messages with FastKafka to build a message processing +pipeline.

3. Custom encoder and decoder

If you are not happy with the json or avro encoder/decoder options, you +can write your own encoder/decoder functions and use them to +encode/decode Pydantic messages.

Writing a custom encoder and decoder

In this section, let’s see how to write a custom encoder and decoder +which obfuscates kafka message with simple +ROT13 cipher.

import codecs
import json
from typing import Any, Type


def custom_encoder(msg: BaseModel) -> bytes:
    """Serialize a pydantic message as ROT13-obfuscated JSON bytes."""
    as_json = msg.json()
    scrambled = codecs.encode(as_json, "rot13")
    return scrambled.encode("utf-8")


def custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:
    """Reverse of custom_encoder: de-obfuscate, parse JSON, build a ``cls``."""
    scrambled = raw_msg.decode("utf-8")
    as_json = codecs.decode(scrambled, "rot13")
    return cls(**json.loads(as_json))

The above code defines two custom functions for encoding and decoding +messages in a Kafka application using the FastKafka library.

The encoding function, custom_encoder(), takes a message msg which +is an instance of a Pydantic model, converts it to a JSON string using +the json() method, obfuscates the resulting string using the ROT13 +algorithm from the codecs module, and finally encodes the obfuscated +string as raw bytes using the UTF-8 encoding.

The decoding function, custom_decoder(), takes a raw message raw_msg +in bytes format, a Pydantic class to construct instance with cls +parameter. It first decodes the raw message from UTF-8 encoding, then +uses the ROT13 algorithm to de-obfuscate the string. Finally, it loads +the resulting JSON string using the json.loads() method and returns a +new instance of the specified cls class initialized with the decoded +dictionary.

These functions can be used with FastKafka’s encoder and decoder +parameters to customize the serialization and deserialization of +messages in Kafka topics.

Let’s test the above code

# Round-trip a sample message through the custom encoder and decoder.
i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

encoded = custom_encoder(i)
display(encoded)

decoded = custom_decoder(encoded, IrisInputData)
display(decoded)

This will result in following output

b'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}'

IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use our custom decoder and +encoder functions:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

# Registry holding the trained model between start-up and shutdown.
ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
    # Train the iris classifier once when the app starts.
    X, y = load_iris(return_X_y=True)
    ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
        X, y
    )
    yield
    # Clean up the ML models and release the resources.
    ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field


class IrisInputData(BaseModel):
    # The four iris measurements; NonNegativeFloat enforces values >= 0.
    sepal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal length in cm"
    )
    sepal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Sepal width in cm"
    )
    petal_length: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal length in cm"
    )
    petal_width: NonNegativeFloat = Field(
        ..., example=0.5, description="Petal width in cm"
    )


class IrisPrediction(BaseModel):
    # Name of the predicted iris species.
    species: str = Field(..., example="setosa", description="Predicted species")


import codecs
import json
from typing import Any, Type


def custom_encoder(msg: BaseModel) -> bytes:
    """Serialize a pydantic message as ROT13-obfuscated JSON bytes."""
    as_json = msg.json()
    scrambled = codecs.encode(as_json, "rot13")
    return scrambled.encode("utf-8")


def custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:
    """Reverse of custom_encoder: de-obfuscate, parse JSON, build a ``cls``."""
    scrambled = raw_msg.decode("utf-8")
    as_json = codecs.decode(scrambled, "rot13")
    return cls(**json.loads(as_json))


from fastkafka import FastKafka

kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Iris predictions",
    kafka_brokers=kafka_brokers,
    lifespan=lifespan,
)


@kafka_app.consumes(topic="input_data", decoder=custom_decoder)
async def on_input_data(msg: IrisInputData):
    # Predict the species class (0-2) from the four measurements.
    species_class = ml_models["iris_predictor"].predict(
        [[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
    )[0]

    await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder=custom_encoder)
async def to_predictions(species_class: int) -> IrisPrediction:
    # Map the numeric class back to its species name and publish it.
    iris_species = ["setosa", "versicolor", "virginica"]

    prediction = IrisPrediction(species=iris_species[species_class])
    return prediction

This code defines a custom encoder and decoder functions for encoding +and decoding messages sent through a Kafka messaging system.

The custom encoder function takes a message represented as a +BaseModel and encodes it as bytes by first converting it to a JSON +string and then obfuscating it using the ROT13 encoding. The obfuscated +message is then converted to bytes using UTF-8 encoding and returned.

The custom decoder function takes in the bytes representing an +obfuscated message, decodes it using UTF-8 encoding, then decodes the +ROT13 obfuscation, and finally loads it as a dictionary using the json +module. This dictionary is then converted to a BaseModel instance +using the cls parameter.

* * *
+
Version: 0.8.0

@consumes basics

You can use @consumes decorator to consume messages from Kafka topics.

In this guide we will create a simple FastKafka app that will consume +HelloWorld messages from hello_world topic.

Import FastKafka

To use the @consumes decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

In this demo we will log the messages to the output so that we can +inspect and verify that our app is consuming properly. For that we need +to import the logger.

from fastkafka._components.logger import get_logger

# Module-level logger used to inspect consumed messages in this guide.
logger = get_logger(__name__)

Define the structure of the messages

Next, you need to define the structure of the messages you want to +consume from the topic using pydantic. For +the guide we’ll stick to something basic, but you are free to define any +complex message structure you wish in your project, just make sure it +can be JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field


class HelloWorld(BaseModel):
    # Single string payload carried by the demo message.
    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server

# Replace the placeholders below with your Kafka bootstrap server's
# actual address and port.
kafka_brokers = {
    "demo_broker": {
        "url": "<url_of_your_kafka_bootstrap_server>",
        "description": "local demo kafka broker",
        "port": "<port_of_your_kafka_bootstrap_server>",
    }
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a consumer function and decorate it with @consumes

Let’s create a consumer function that will consume HelloWorld messages +from hello_world topic and log them.

@app.consumes()
async def on_hello_world(msg: HelloWorld):
    # Topic defaults to "hello_world": the "on_" prefix is stripped
    # from the function name.
    logger.info(f"Got msg: {msg}")

The function decorated with the @consumes decorator will be called +when a message is produced to Kafka.

The message will then be injected into the typed msg argument of the +function and its type will be used to parse the message.

In this example case, when the message is sent into a hello_world +topic, it will be parsed into a HelloWorld class and on_hello_world +function will be called with the parsed class as msg argument value.

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)


class HelloWorld(BaseModel):
    # Single string payload carried by the demo message.
    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )


# Replace the placeholders with your Kafka bootstrap server's address and port.
kafka_brokers = {
    "demo_broker": {
        "url": "<url_of_your_kafka_bootstrap_server>",
        "description": "local demo kafka broker",
        "port": "<port_of_your_kafka_bootstrap_server>",
    }
}

app = FastKafka(kafka_brokers=kafka_brokers)


@app.consumes()
async def on_hello_world(msg: HelloWorld):
    # Topic defaults to "hello_world", derived from the function name.
    logger.info(f"Got msg: {msg}")

Run the app

Now we can run the app. Copy the code above in consumer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

After running the command, you should see this output in your terminal:

[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[14442]: 23-06-15 07:16:00.585 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
Starting process cleanup, this may take a few seconds...
23-06-15 07:16:04.626 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 14442...
[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-15 07:16:05.853 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 14442 terminated.

Send the message to kafka topic

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo { \"msg\": \"Hello world\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>
[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[15588]: 23-06-15 07:16:15.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[15588]: 23-06-15 07:16:15.294 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[15588]: 23-06-15 07:16:15.295 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[15588]: 23-06-15 07:16:15.295 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[15588]: 23-06-15 07:16:15.302 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
[15588]: 23-06-15 07:16:25.867 [INFO] consumer_example: Got msg: msg='Hello world'
Starting process cleanup, this may take a few seconds...
23-06-15 07:16:34.168 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 15588...
[15588]: 23-06-15 07:16:35.358 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[15588]: 23-06-15 07:16:35.359 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-15 07:16:35.475 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 15588 terminated.

You should see the "Got msg: msg='Hello world'" message being logged by your consumer.

Choosing a topic

You probably noticed that you didn’t define which topic you are +receiving the message from, this is because the @consumes decorator +determines the topic by default from your function name. The decorator +will take your function name and strip the default “on_" prefix from it +and use the rest as the topic name. In this example case, the topic is +hello_world.

You can choose your custom prefix by defining the prefix parameter in +consumes decorator, like this:

@app.consumes(prefix="read_from_")
async def read_from_hello_world(msg: HelloWorld):
    # Consumes from "hello_world": the custom "read_from_" prefix is stripped.
    logger.info(f"Got msg: {msg}")

Also, you can define the topic name completely by defining the topic +in parameter in consumes decorator, like this:

@app.consumes(topic="my_special_topic")
async def on_hello_world(msg: HelloWorld):
    # An explicit topic parameter overrides the name-derived default.
    logger.info(f"Got msg: {msg}")

Message data

The message received from Kafka is translated from its binary JSON representation into the class given by the type annotation of the msg parameter of the function decorated by the @consumes decorator.

In this example case, the message will be parsed into a HelloWorld +class.

Message metadata

If you need any of Kafka message metadata such as timestamp, partition +or headers you can access the metadata by adding a EventMetadata typed +argument to your consumes function and the metadata from the incoming +message will be automatically injected when calling the consumes +function.

Let’s demonstrate that.

Create a consumer function with metadata

The only difference from the original basic consume function is that we +are now passing the meta: EventMetadata argument to the function. The +@consumes decorator will register that and, when a message is +consumed, it will also pass the metadata to your function. Now you can +use the metadata in your consume function. Lets log it to see what it +contains.

First, we need to import the EventMetadata

from fastkafka import EventMetadata

Now we can add the meta argument to our consuming function.

@app.consumes()
async def on_hello_world(msg: HelloWorld, meta: EventMetadata):
    # meta is injected automatically with the incoming record's metadata
    # (topic, partition, offset, timestamp, key, headers, ...).
    logger.info(f"Got metadata: {meta}")

Your final app should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field
from fastkafka import EventMetadata
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)


class HelloWorld(BaseModel):
    # Single string payload carried by the demo message.
    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )


# Replace the placeholders with your Kafka bootstrap server's address and port.
kafka_brokers = {
    "demo_broker": {
        "url": "<url_of_your_kafka_bootstrap_server>",
        "description": "local demo kafka broker",
        "port": "<port_of_your_kafka_bootstrap_server>",
    }
}

app = FastKafka(kafka_brokers=kafka_brokers)


@app.consumes()
async def on_hello_world(msg: HelloWorld, meta: EventMetadata):
    # meta is injected automatically with the incoming record's metadata.
    logger.info(f"Got metadata: {meta}")

Now lets run the app and send a message to the broker to see the logged +message metadata.

You should see a similar log as the one below and the metadata being +logged in your app.

[20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[20050]: 23-06-15 07:18:55.682 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
[20050]: 23-06-15 07:19:06.337 [INFO] consumer_example: Got metadata: EventMetadata(topic='hello_world', partition=0, offset=0, timestamp=1686813546255, timestamp_type=0, key=None, value=b'{ "msg": "Hello world" }', checksum=None, serialized_key_size=-1, serialized_value_size=24, headers=())
Starting process cleanup, this may take a few seconds...
23-06-15 07:19:14.547 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 20050...
[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-15 07:19:15.742 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 20050 terminated.

As you can see in the log, from the metadata you now have the +information about the partition, offset, timestamp, key and headers. +🎉

Dealing with high latency consuming functions

If your functions have high latency due to, for example, lengthy database calls, you will notice a big decrease in performance. This is due to how the consumes decorator executes your consume functions when consuming events. By default, the consuming functions for one topic run sequentially; this is the most straightforward approach and results in the least amount of overhead.

But, to handle those high latency tasks and run them in parallel, +FastKafka has a +DynamicTaskExecutor +prepared for your consumers. This executor comes with additional +overhead, so use it only when you need to handle high latency functions.

Lets demonstrate how to use it.

To your consumes decorator, add an executor option and set it to +"DynamicTaskExecutor", this will enable the consumer to handle high +latency functions effectively.

Your consuming function should now look like this:

@app.consumes(executor="DynamicTaskExecutor")
async def on_hello_world(msg: HelloWorld):
    # DynamicTaskExecutor lets high-latency consume calls run in parallel,
    # at the cost of some additional overhead.
    logger.info(f"Got msg: {msg}")

And the complete app should now look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)


class HelloWorld(BaseModel):
    # Single string payload carried by the demo message.
    msg: str = Field(
        ...,
        example="Hello",
        description="Demo hello world message",
    )


# Replace the placeholders with your Kafka bootstrap server's address and port.
kafka_brokers = {
    "demo_broker": {
        "url": "<url_of_your_kafka_bootstrap_server>",
        "description": "local demo kafka broker",
        "port": "<port_of_your_kafka_bootstrap_server>",
    }
}

app = FastKafka(kafka_brokers=kafka_brokers)


@app.consumes(executor="DynamicTaskExecutor")
async def on_hello_world(msg: HelloWorld):
    # DynamicTaskExecutor lets high-latency consume calls run in parallel.
    logger.info(f"Got msg: {msg}")

You can now run your app using the CLI commands described in this guide.

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo { \"msg\": \"Hello world\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see a similar log as the one below.

[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[21539]: 23-06-15 07:19:25.154 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
[21539]: 23-06-15 07:19:35.512 [INFO] consumer_example: Got msg: msg='Hello world'
Starting process cleanup, this may take a few seconds...
23-06-15 07:19:44.023 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 21539...
[21539]: 23-06-15 07:19:45.202 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[21539]: 23-06-15 07:19:45.203 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-15 07:19:45.313 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 21539 terminated.

Inside the log, you should see the “Got msg: msg='Hello world'" being +logged by your consumer.

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_12_Batch_Consuming/index.html b/docs/guides/Guide_12_Batch_Consuming/index.html new file mode 100644 index 0000000..e7b9efa --- /dev/null +++ b/docs/guides/Guide_12_Batch_Consuming/index.html @@ -0,0 +1,47 @@ + + + + + +Batch consuming | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Batch consuming

If you want to consume data in batches @consumes decorator makes that +possible for you. By typing a consumed msg object as a list of +messages the consumer will call your consuming function with a batch of +messages consumed from a single partition. Let’s demonstrate that now.

Consume function with batching

To consume messages in batches, you need to wrap your message type into a +list and the @consumes decorator will take care of the rest for you. +Your consumes function will be called with batches grouped by partition +now.

@app.consumes(auto_offset_reset="earliest")
async def on_hello_world(msg: List[HelloWorld]):
logger.info(f"Got msg batch: {msg}")

App example

We will modify the app example from @consumes +basics guide to consume +HelloWorld messages batch. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


import asyncio
from typing import List
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.consumes(auto_offset_reset="earliest")
async def on_hello_world(msg: List[HelloWorld]):
logger.info(f"Got msg batch: {msg}")

Send the messages to kafka topic

Lets send a couple of HelloWorld messages to the hello_world topic +and check if our consumer kafka application has logged the received +messages batch. In your terminal, run the following command at least two +times to create multiple messages in your kafka queue:

echo { ^"msg^": ^"Hello world^" }
echo { ^"msg^": ^"Hello world^" } | kafka-console-producer.bat --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

Now we can run the app. Copy the code of the example app in +consumer_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

You should see the your Kafka messages being logged in batches by your +consumer.

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_21_Produces_Basics/index.html b/docs/guides/Guide_21_Produces_Basics/index.html new file mode 100644 index 0000000..cc4bd39 --- /dev/null +++ b/docs/guides/Guide_21_Produces_Basics/index.html @@ -0,0 +1,62 @@ + + + + + +@produces basics | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

@produces basics

You can use @produces decorator to produce messages to Kafka topics.

In this guide we will create a simple FastKafka app that will produce +hello world messages to hello_world topic.

Import FastKafka

To use the @produces decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

Define the structure of the messages

Next, you need to define the structure of the messages you want to send +to the topic using pydantic. For the guide +we’ll stick to something basic, but you are free to define any complex +message structure you wish in your project, just make sure it can be +JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a producer function and decorate it with @produces

Let’s create a producer function that will produce HelloWorld messages +to hello_world topic:


@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Now you can call your defined function as any normal python function in +your code. The side effect of calling the function will be that the +value you are returning will also be sent to a kafka topic.

By default, the topic is determined from your function name, the “to_" +prefix is stripped and what is left over is used as a topic name. In this +case, that is hello_world.

Instruct the app to start sending HelloWorld messages

Let’s use @run_in_background decorator to instruct our app to send +HelloWorld messages to hello_world topic every second.


import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)


kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

script_file = "producer_example.py"
cmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"
md(
f"Now we can run the app. Copy the code above in producer_example.py and run it by running\n```shell\n{cmd}\n```"
)

Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.

Check if the message was sent to the Kafka topic

Lets check the topic and see if there is a “Hello world!" message in the +hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the {“msg": “Hello world!"} messages in your topic.

Choosing a topic

You probably noticed that you didn’t define which topic you are sending +the message to, this is because the @produces decorator determines the +topic by default from your function name. The decorator will take your +function name and strip the default “to_" prefix from it and use the +rest as the topic name. In this example case, the topic is +hello_world.

!!! warn "New topics"

Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.

You can choose your custom prefix by defining the prefix parameter in +produces decorator, like this:


@app.produces(prefix="send_to_")
async def send_to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Also, you can define the topic name completely by defining the topic +in parameter in produces decorator, like this:


@app.produces(topic="my_special_topic")
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Message data

The return value from your function will be translated to a JSON string +and then to bytes and sent to the defined Kafka topic. The typing of the return +value is used for generating the documentation for your Kafka app.

In this example case, the return value is HelloWorld class which will be +translated into JSON formatted string and then to bytes. The translated +data will then be sent to Kafka. In the form of: +b'{"msg":"Hello world!"}'

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_22_Partition_Keys/index.html b/docs/guides/Guide_22_Partition_Keys/index.html new file mode 100644 index 0000000..b21ef75 --- /dev/null +++ b/docs/guides/Guide_22_Partition_Keys/index.html @@ -0,0 +1,55 @@ + + + + + +Defining a partition key | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Defining a partition key

Partition keys are used in Apache Kafka to determine which partition a +message should be written to. This ensures that related messages are +kept together in the same partition, which can be useful for ensuring +order or for grouping related messages together for efficient +processing. Additionally, partitioning data across multiple partitions +allows Kafka to distribute load across multiple brokers and scale +horizontally, while replicating data across multiple brokers provides +fault tolerance.

You can define your partition keys when using the @produces decorator, +this guide will demonstrate to you this feature.

Return a key from the producing function

To define a key for the message that you want to produce to Kafka topic, +you need to wrap the response into +KafkaEvent +class and set the key value. Check the example below:


from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

In the example, we want to return the HelloWorld message class with +the key defined as my_key. So, we wrap the message and key into a +KafkaEvent class and return it as such.

While generating the documentation, the +KafkaEvent +class will be unwrapped and the HelloWorld class will be documented in +the definition of message type, same way if you didn’t use the key.

!!! info "Which key to choose?"

Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.

App example

We will modify the app example from @producer basics guide to return +the HelloWorld with our key. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

Now we can run the app. Copy the code above in producer_with_key_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_with_key_example:app

After running the command, you should see this output in your terminal:

[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.

Check if the message was sent to the Kafka topic with the desired key

Lets check the topic and see if there is a “Hello world!" message in the +hello_world topic with the defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the my_key {“msg": “Hello world!"} messages in your +topic appearing, the my_key part of the message is the key that we +defined in our producing function.

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_23_Batch_Producing/index.html b/docs/guides/Guide_23_Batch_Producing/index.html new file mode 100644 index 0000000..ecbc73c --- /dev/null +++ b/docs/guides/Guide_23_Batch_Producing/index.html @@ -0,0 +1,55 @@ + + + + + +Batch producing | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Batch producing

If you want to send your data in batches @produces decorator makes +that possible for you. By returning a list of messages you want to +send in a batch the producer will collect the messages and send them in +a batch to a Kafka broker.

This guide will demonstrate how to use this feature.

Return a batch from the producing function

To define a batch that you want to produce to Kafka topic, you need to +return the List of the messages that you want to be batched from your +producing function.


from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

In the example, we want to return the HelloWorld message class batch +that is created from a list of msgs we passed into our producing +function.

Let's also prepare a background task that will send a batch of “hello +world" messages when the app starts.


@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

App example

We will modify the app example from @producer +basics guide to return the +HelloWorld batch. The final app will look like this (make sure you +replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

Run the app

Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task
[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.

Check if the batch was sent to the Kafka topic with the defined key

Lets check the topic and see if there are “Hello world" messages in the +hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages in your topic.

Batch key

To define a key for your batch like in Defining a partition +key guide you can wrap the +returning value in a +KafkaEvent +class. To learn more about defining a partition key and +KafkaEvent +class, please, have a look at Defining a partition +key guide.

Let’s demonstrate that.

To define a key, we just need to modify our producing function, like +this:


from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Now our app looks like this:


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Check if the batch was sent to the Kafka topic

Lets check the topic and see if there are “Hello world" messages in the +hello_world topic, containing a defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages with the defined key in your topic.

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html b/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html new file mode 100644 index 0000000..3f62cf9 --- /dev/null +++ b/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html @@ -0,0 +1,155 @@ + + + + + +Using multiple Kafka clusters | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Using multiple Kafka clusters

Ready to take your FastKafka app to the next level? This guide shows you +how to connect to multiple Kafka clusters effortlessly. Consolidate +topics and produce messages across clusters like a pro. Unleash the full +potential of your Kafka-powered app with FastKafka. Let’s dive in and +elevate your application’s capabilities!

Test message

To showcase the functionalities of FastKafka and illustrate the concepts +discussed, we can use a simple test message called TestMsg. Here’s the +definition of the TestMsg class:

class TestMsg(BaseModel):
msg: str = Field(...)

Defining multiple broker configurations

When building a FastKafka application, you may need to consume messages +from multiple Kafka clusters, each with its own set of broker +configurations. FastKafka provides the flexibility to define different +broker clusters using the brokers argument in the consumes decorator. +Let’s explore an example code snippet

from pydantic import BaseModel, Field

from fastkafka import FastKafka


class TestMsg(BaseModel):
msg: str = Field(...)


kafka_brokers_1 = dict(
development=dict(url="dev.server_1", port=9092),
production=dict(url="prod.server_1", port=9092),
)
kafka_brokers_2 = dict(
development=dict(url="dev.server_2", port=9092),
production=dict(url="prod.server_1", port=9092),
)

app = FastKafka(kafka_brokers=kafka_brokers_1, bootstrap_servers_id="development")


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_1(msg: TestMsg):
print(f"Received on s1: {msg=}")
await to_predictions_1(msg)


@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def on_preprocessed_signals_2(msg: TestMsg):
print(f"Received on s2: {msg=}")
await to_predictions_2(msg)


@app.produces(topic="predictions")
async def to_predictions_1(msg: TestMsg) -> TestMsg:
return msg


@app.produces(topic="predictions", brokers=kafka_brokers_2)
async def to_predictions_2(msg: TestMsg) -> TestMsg:
return msg

In this example, the application has two consumes endpoints, both of +which will consume events from preprocessed_signals topic. +on_preprocessed_signals_1 will consume events from kafka_brokers_1 +configuration and on_preprocessed_signals_2 will consume events from +kafka_brokers_2 configuration. When producing, to_predictions_1 will +produce to predictions topic on kafka_brokers_1 cluster and +to_predictions_2 will produce to predictions topic on +kafka_brokers_2 cluster.

How it works

The kafka_brokers_1 configuration represents the primary cluster, +while kafka_brokers_2 serves as an alternative cluster specified in +the decorator.

Using the FastKafka class, the app object is initialized with the +primary broker configuration (kafka_brokers_1). By default, the +@app.consumes decorator without the brokers argument consumes messages +from the preprocessed_signals topic on kafka_brokers_1.

To consume messages from a different cluster, the @app.consumes +decorator includes the brokers argument. This allows explicit +specification of the broker cluster in the on_preprocessed_signals_2 +function, enabling consumption from the same topic but using the +kafka_brokers_2 configuration.

The brokers argument can also be used in the @app.produces decorator to +define multiple broker clusters for message production.

It’s important to ensure that all broker configurations have the same +required settings as the primary cluster to ensure consistent behavior.

Testing the application

To test our FastKafka ‘mirroring’ application, we can use our testing +framework. Lets take a look how it’s done:

from fastkafka.testing import Tester

async with Tester(app) as tester:
# Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from
await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal_s1"))
# Assert on_preprocessed_signals_1 consumed sent message
await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(
TestMsg(msg="signal_s1"), timeout=5
)
# Assert app has produced a prediction
await tester.mirrors[app.to_predictions_1].assert_called_with(
TestMsg(msg="signal_s1"), timeout=5
)

# Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from
await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal_s2"))
# Assert on_preprocessed_signals_2 consumed sent message
await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(
TestMsg(msg="signal_s2"), timeout=5
)
# Assert app has produced a prediction
await tester.mirrors[app.to_predictions_2].assert_called_with(
TestMsg(msg="signal_s2"), timeout=5
)
23-06-23 12:15:51.156 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-06-23 12:15:51.157 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-06-23 12:15:51.157 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'
23-06-23 12:15:51.158 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:15:51.158 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'
23-06-23 12:15:51.159 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:15:51.178 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'
23-06-23 12:15:51.178 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:15:51.179 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'
23-06-23 12:15:51.180 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}
23-06-23 12:15:51.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:15:51.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:15:51.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:15:51.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:15:51.187 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}
23-06-23 12:15:51.187 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:15:51.188 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:15:51.188 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:15:51.189 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}
23-06-23 12:15:51.190 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:15:51.191 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}
23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:15:51.193 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:15:51.194 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:15:51.194 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
Received on s1: msg=TestMsg(msg='signal_s1')
Received on s2: msg=TestMsg(msg='signal_s2')
23-06-23 12:15:56.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:15:56.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:15:56.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:15:56.183 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:15:56.184 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:15:56.184 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:15:56.185 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:15:56.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:15:56.188 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

The usage of the tester.mirrors dictionary allows specifying the desired topic/broker combination for sending the test messages, especially when working with multiple Kafka clusters. This ensures that the data is sent to the appropriate topic/broker based on the consuming function, and consumed from the appropriate topic/broker based on the producing function.

Running the application

You can run your application using fastkafka run CLI command in the +same way that you would run a single cluster app.

To start your app, copy the code above in multi_cluster_example.py and +run it by running:

Now we can run the app. Copy the code above into multi_cluster_example.py, adjust your server configurations, and run it with:

fastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app

In your app logs, you should see your app starting up and your two +consumer functions connecting to different kafka clusters.

[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24092'}
[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24093'}
[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})
[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}
[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})
[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}
[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}.
[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}.
Starting process cleanup, this may take a few seconds...
23-06-23 12:16:18.294 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 182747...
[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:19.471 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 182747 terminated.

Application documentation

At the moment, the documentation for the multi-cluster app is not yet implemented, but it is under development and you can expect it soon!

Examples on how to use multiple broker configurations

Example #1

In this section, we’ll explore how you can effectively forward topics +between different Kafka clusters, enabling seamless data synchronization +for your applications.

Imagine having two Kafka clusters, namely kafka_brokers_1 and +kafka_brokers_2, each hosting its own set of topics and messages. Now, +if you want to forward a specific topic (in this case: +preprocessed_signals) from kafka_brokers_1 to kafka_brokers_2, +FastKafka provides an elegant solution.

Let’s examine the code snippet that configures our application for topic +forwarding:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
    # Simple message payload carried through the Kafka topics.
    msg: str = Field(...)


# Two separate cluster configurations: `kafka_brokers_1` is the app's default
# cluster, `kafka_brokers_2` is the forwarding target.
kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_original(msg: TestMsg):
    # Consume from the default cluster and forward every message unchanged.
    await to_preprocessed_signals_forward(msg)


@app.produces(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:
    # Produce the message to the same topic on the second cluster.
    return data

Here’s how it works: our FastKafka application is configured to consume +messages from kafka_brokers_1 and process them in the +on_preprocessed_signals_original function. We want to forward these +messages to kafka_brokers_2. To achieve this, we define the +to_preprocessed_signals_forward function as a producer, seamlessly +producing the processed messages to the preprocessed_signals topic +within the kafka_brokers_2 cluster.

Testing

To test our FastKafka forwarding application, we can use our testing +framework. Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg="signal"))
await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)
23-06-23 12:16:31.689 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-06-23 12:16:31.690 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-06-23 12:16:31.691 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-06-23 12:16:31.691 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:31.701 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:31.702 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:31.702 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:31.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:31.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:31.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:31.707 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-06-23 12:16:31.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:31.708 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:31.708 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:31.709 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:31.709 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:35.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:35.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:35.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:35.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:35.705 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:35.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:35.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

With the help of the Tester object, we can simulate and verify the +behavior of our FastKafka application. Here’s how it works:

  1. We create an instance of the Tester by passing in our app +object, which represents our FastKafka application.

  2. Using the tester.mirrors dictionary, we can send a message to a specific Kafka broker and topic combination. In this case, we use tester.mirrors[app.on_preprocessed_signals_original] to send a TestMsg message with the content “signal” to the appropriate Kafka broker and topic.

  3. After sending the message, we can perform assertions on the mirrored +function using +tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5). +This assertion ensures that the mirrored function has been called +within a specified timeout period (in this case, 5 seconds).

Example #2

In this section, we’ll explore how you can effortlessly consume data +from multiple sources, process it, and aggregate the results into a +single topic on a specific cluster.

Imagine you have two Kafka clusters: kafka_brokers_1 and +kafka_brokers_2, each hosting its own set of topics and messages. +Now, what if you want to consume data from both clusters, perform some +processing, and produce the results to a single topic on +kafka_brokers_1? FastKafka has got you covered!

Let’s take a look at the code snippet that configures our application +for aggregating multiple clusters:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
    # Simple message payload carried through the Kafka topics.
    msg: str = Field(...)


kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

# The app's default cluster is kafka_brokers_1; individual consumers may
# override it via the `brokers` argument.
app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_1(msg: TestMsg):
    # Consumes from the default cluster (kafka_brokers_1).
    print(f"Default: {msg=}")
    await to_predictions(msg)


@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def on_preprocessed_signals_2(msg: TestMsg):
    # Consumes the same topic from the second cluster (kafka_brokers_2).
    print(f"Specified: {msg=}")
    await to_predictions(msg)


@app.produces(topic="predictions")
async def to_predictions(prediction: TestMsg) -> TestMsg:
    # Aggregates results from both consumers onto kafka_brokers_1.
    print(f"Sending prediction: {prediction}")
    # NOTE(review): returns a one-element list although the annotation says
    # -> TestMsg; confirm FastKafka accepts list returns here (Example #1
    # returns the message object directly).
    return [prediction]

Here’s the idea: our FastKafka application is set to consume messages +from the topic “preprocessed_signals" on kafka_brokers_1 cluster, as +well as from the same topic on kafka_brokers_2 cluster. We have two +consuming functions, on_preprocessed_signals_1 and +on_preprocessed_signals_2, that handle the messages from their +respective clusters. These functions perform any required processing, in +this case, just calling the to_predictions function.

The exciting part is that the to_predictions function acts as a +producer, sending the processed results to the “predictions" topic on +kafka_brokers_1 cluster. By doing so, we effectively aggregate the +data from multiple sources into a single topic on a specific cluster.

This approach enables you to consume data from multiple Kafka clusters, +process it, and produce the aggregated results to a designated topic. +Whether you’re generating predictions, performing aggregations, or any +other form of data processing, FastKafka empowers you to harness the +full potential of multiple clusters.

Testing

Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal"))
await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal"))
await tester.on_predictions.assert_called(timeout=5)
23-06-23 12:16:41.222 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-06-23 12:16:41.223 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-06-23 12:16:41.224 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:41.224 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:41.239 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:41.239 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:41.240 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-06-23 12:16:41.240 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:41.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:41.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-06-23 12:16:41.245 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:41.248 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:41.248 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
Default: msg=TestMsg(msg='signal')
Sending prediction: msg='signal'
Specified: msg=TestMsg(msg='signal')
Sending prediction: msg='signal'
23-06-23 12:16:45.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:45.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:45.244 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:45.245 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:45.246 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:45.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:45.247 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Here’s how the code above works:

  1. Within an async with block, create an instance of the Tester by +passing in your app object, representing your FastKafka application.

  2. Using the tester.mirrors dictionary, you can send messages to +specific Kafka broker and topic combinations. In this case, we use +tester.mirrors[app.on_preprocessed_signals_1] and +tester.mirrors[app.on_preprocessed_signals_2] to send TestMsg +messages with the content “signal" to the corresponding Kafka broker +and topic combinations.

  3. After sending the messages, you can perform assertions on the +on_predictions function using +tester.on_predictions.assert_called(timeout=5). This assertion +ensures that the on_predictions function has been called within a +specified timeout period (in this case, 5 seconds).

Example #3

In some scenarios, you may need to produce messages to multiple Kafka +clusters simultaneously. FastKafka simplifies this process by allowing +you to configure your application to produce messages to multiple +clusters effortlessly. Let’s explore how you can achieve this:

Consider the following code snippet that demonstrates producing messages +to multiple clusters:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
    # Simple message payload carried through the Kafka topics.
    msg: str = Field(...)


kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals(msg: TestMsg):
    # Fan the same prediction out to both clusters.
    print(f"{msg=}")
    await to_predictions_1(TestMsg(msg="prediction"))
    await to_predictions_2(TestMsg(msg="prediction"))


@app.produces(topic="predictions")
async def to_predictions_1(prediction: TestMsg) -> TestMsg:
    # Produces to "predictions" on the default cluster (kafka_brokers_1).
    print(f"Sending prediction to s1: {prediction}")
    # NOTE(review): returns a one-element list although annotated -> TestMsg;
    # confirm FastKafka accepts list returns here.
    return [prediction]


@app.produces(topic="predictions", brokers=kafka_brokers_2)
async def to_predictions_2(prediction: TestMsg) -> TestMsg:
    # Produces the same payload to "predictions" on kafka_brokers_2.
    print(f"Sending prediction to s2: {prediction}")
    # NOTE(review): same list-vs-annotation mismatch as to_predictions_1.
    return [prediction]

Here’s what you need to know about producing to multiple clusters:

  1. We define two Kafka broker configurations: kafka_brokers_1 and +kafka_brokers_2, representing different clusters with their +respective connection details.

  2. We create an instance of the FastKafka application, specifying +kafka_brokers_1 as the primary cluster for producing messages.

  3. The on_preprocessed_signals function serves as a consumer, +handling incoming messages from the “preprocessed_signals" topic. +Within this function, we invoke two producer functions: +to_predictions_1 and to_predictions_2.

  4. The to_predictions_1 function sends predictions to the +“predictions" topic on kafka_brokers_1 cluster.

  5. Additionally, the to_predictions_2 function sends the same +predictions to the “predictions" topic on kafka_brokers_2 cluster. +This allows for producing the same data to multiple clusters +simultaneously.

By utilizing this approach, you can seamlessly produce messages to +multiple Kafka clusters, enabling you to distribute data across +different environments or leverage the strengths of various clusters.

Feel free to customize the producer functions as per your requirements, +performing any necessary data transformations or enrichment before +sending the predictions.

With FastKafka, producing to multiple clusters becomes a breeze, +empowering you to harness the capabilities of multiple environments +effortlessly.

Testing

Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.to_preprocessed_signals(TestMsg(msg="signal"))
await tester.mirrors[to_predictions_1].assert_called(timeout=5)
await tester.mirrors[to_predictions_2].assert_called(timeout=5)
23-06-23 12:16:49.903 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-06-23 12:16:49.904 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-06-23 12:16:49.904 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:49.905 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:49.905 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-06-23 12:16:49.906 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:49.921 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:49.921 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:49.921 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:49.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:49.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:49.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:49.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:49.924 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:49.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:49.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:49.926 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-06-23 12:16:49.928 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:16:49.929 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
msg=TestMsg(msg='signal')
Sending prediction to s1: msg='prediction'
Sending prediction to s2: msg='prediction'
23-06-23 12:16:53.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:53.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:53.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:53.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:53.924 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:53.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Here’s how you can perform the necessary tests:

  1. Within an async with block, create an instance of the Tester by +passing in your app object, representing your FastKafka application.

  2. Using the tester.to_preprocessed_signals method, you can send a TestMsg message with the content “signal”.

  3. After sending the message, you can perform assertions on the +to_predictions_1 and to_predictions_2 functions using +tester.mirrors[to_predictions_1].assert_called(timeout=5) and +tester.mirrors[to_predictions_2].assert_called(timeout=5). These +assertions ensure that the respective producer functions have +produced data to their respective topic/broker combinations.

By employing this testing approach, you can verify that the producing +functions correctly send messages to their respective clusters. The +testing framework provided by FastKafka enables you to ensure the +accuracy and reliability of your application’s producing logic.

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html b/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html new file mode 100644 index 0000000..2b4fc89 --- /dev/null +++ b/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html @@ -0,0 +1,73 @@ + + + + + +Deploying FastKafka using Docker | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Deploying FastKafka using Docker

Building a Docker Image

To build a Docker image for a FastKafka project, we need the following +items:

  1. A library that is built using FastKafka.
  2. A file in which the requirements are specified. This could be a +requirements.txt file, a setup.py file, or even a wheel file.
  3. A Dockerfile to build an image that will include the two files +mentioned above.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Creating requirements.txt file

The above code only requires FastKafka. So, we will add only that to the +requirements.txt file, but you can add additional requirements to it +as well.

fastkafka>=0.3.0

Here we are using requirements.txt to store the project’s +dependencies. However, other methods like setup.py, pipenv, and +wheel files can also be used. setup.py is commonly used for +packaging and distributing Python modules, while pipenv is a tool used +for managing virtual environments and package dependencies. wheel +files are built distributions of Python packages that can be installed +with pip.

Creating Dockerfile

# (1)
FROM python:3.9-slim-bullseye
# (2)
WORKDIR /project
# (3)
COPY application.py requirements.txt /project/
# (4)
RUN pip install --no-cache-dir --upgrade -r /project/requirements.txt
# (5)
CMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]
  1. Start from the official Python base image.

  2. Set the current working directory to /project.

    This is where we’ll put the requirements.txt file and the +application.py file.

  3. Copy the application.py file and requirements.txt file inside +the /project directory.

  4. Install the package dependencies in the requirements file.

    The --no-cache-dir option tells pip to not save the downloaded +packages locally, as that is only if pip was going to be run again +to install the same packages, but that’s not the case when working +with containers.

    The --upgrade option tells pip to upgrade the packages if they +are already installed.

  5. Set the command to run the fastkafka run command.

    CMD takes a list of strings, each of these strings is what you +would type in the command line separated by spaces.

    This command will be run from the current working directory, the +same /project directory you set above with WORKDIR /project.

    We supply additional parameters --num-workers and --kafka-broker +for the run command. Finally, we specify the location of our +FastKafka application as a command argument.

    To learn more about fastkafka run command please check the CLI +docs.

Build the Docker Image

Now that all the files are in place, let’s build the container image.

  1. Go to the project directory (where your Dockerfile is, containing +your application.py file).

  2. Run the following command to build the image:

    docker build -t fastkafka_project_image .

    This command will create a docker image with the name +fastkafka_project_image and the latest tag.

That’s it! You have now built a docker image for your FastKafka project.

Start the Docker Container

Run a container based on the built image:

docker run -d --name fastkafka_project_container fastkafka_project_image

Additional Security

Trivy is an open-source tool that scans Docker images for +vulnerabilities. It can be integrated into your CI/CD pipeline to ensure +that your images are secure and free from known vulnerabilities. Here’s +how you can use trivy to scan your fastkafka_project_image:

  1. Install trivy on your local machine by following the instructions +provided in the official trivy +documentation.

  2. Run the following command to scan your fastkafka_project_image:

    trivy image fastkafka_project_image

    This command will scan your fastkafka_project_image for any +vulnerabilities and provide you with a report of its findings.

  3. Fix any vulnerabilities identified by trivy. You can do this by +updating the vulnerable package to a more secure version or by using +a different package altogether.

  4. Rebuild your fastkafka_project_image and repeat steps 2 and 3 +until trivy reports no vulnerabilities.

By using trivy to scan your Docker images, you can ensure that your +containers are secure and free from known vulnerabilities.

Example repo

A +FastKafka +based library which uses above mentioned Dockerfile to build a docker +image can be found +here

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html b/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html new file mode 100644 index 0000000..d6d93a6 --- /dev/null +++ b/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html @@ -0,0 +1,143 @@ + + + + + +Using Redpanda to test FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Using Redpanda to test FastKafka

What is FastKafka?

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

What is Redpanda?

Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools +work out of the box with Redpanda.

From redpanda.com:

Redpanda is a Kafka®-compatible streaming data platform that is proven +to be 10x faster and 6x lower in total costs. It is also JVM-free, +ZooKeeper®-free, Jepsen-tested and source available.

Some of the advantages of Redpanda over Kafka are

  1. A single binary with built-in everything, no ZooKeeper® or JVM +needed.
  2. Costs up to 6X less than Kafka.
  3. Up to 10x lower average latencies and up to 6x faster Kafka +transactions without compromising correctness.

To learn more about Redpanda, please visit their +website or checkout this blog +post +comparing Redpanda and Kafka’s performance benchmarks.

Example repo

A sample FastKafka-based library that uses Redpanda for testing, based +on this guide, can be found +here.

The process

Here are the steps we’ll be walking through to build our example:

  1. Set up the prerequisites.
  2. Clone the example repo.
  3. Explain how to write an application using FastKafka.
  4. Explain how to write a test case to test FastKafka with Redpanda.
  5. Run the test case and produce/consume messages.

1. Prerequisites

Before starting, make sure you have the following prerequisites set up:

  1. Python 3.x: A Python 3.x installation is required to run +FastKafka. You can download the latest version of Python from the +official website. You’ll also +need to have pip installed and updated, which is Python’s package +installer.
  2. Docker Desktop: Docker is used to run Redpanda, which is +required for testing FastKafka. You can download and install Docker +Desktop from the official +website.
  3. Git: You’ll need to have Git installed to clone the example +repo. You can download Git from the official +website.

2. Cloning and setting up the example repo

To get started with the example code, clone the GitHub +repository by +running the following command in your terminal:

git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git
cd sample_fastkafka_with_redpanda

This will create a new directory called sample_fastkafka_with_redpanda +and download all the necessary files.

Create a virtual environment

Before writing any code, let’s create a new virtual +environment +for our project.

A virtual environment is an isolated environment for a Python project, +which allows you to manage project-specific dependencies and avoid +conflicts between different projects.

To create a new virtual environment, run the following commands in your +terminal:

python3 -m venv venv

This will create a new directory called venv in your project +directory, which will contain the virtual environment.

To activate the virtual environment, run the following command:

source venv/bin/activate

This will change your shell’s prompt to indicate that you are now +working inside the virtual environment.

Finally, run the following command to upgrade pip, the Python package +installer:

pip install --upgrade pip

Install Python dependencies

Next, let’s install the required Python dependencies. In this guide, +we’ll be using +FastKafka +to write our application code and pytest and pytest-asyncio to test +it.

You can install the dependencies from the requirements.txt file +provided in the cloned repository by running:

pip install -r requirements.txt

This will install all the required packages and their dependencies.

3. Writing server code

The application.py file in the cloned repository demonstrates how to +use FastKafka to consume messages from a Kafka topic, make predictions +using a predictive model, and publish the predictions to another Kafka +topic. Here is an explanation of the code:

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call +downloads the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used both +to generate documentation and to later run the server against one of the +given Kafka brokers.

Next, an instance of the +FastKafka +class is initialized with the minimum required arguments:

  • kafka_brokers: a dictionary used for generating documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

4. Writing the test code

The service can be tested using the +Tester +instance which can be configured to start a Redpanda +broker for testing +purposes. The test.py file in the cloned repository contains the +following code for testing.

import pytest
from application import IrisInputData, IrisPrediction, kafka_app

from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)


@pytest.mark.asyncio
async def test():
# Start Tester app and create local Redpanda broker for testing
async with Tester(kafka_app).using_local_redpanda(
tag="v23.1.2", listener_port=9092
) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)

The +Tester +module uses +LocalRedpandaBroker +to start and stop a Redpanda broker for testing purposes using Docker.

5. Running the tests

We can run the tests in the test.py file by executing the +following command:

pytest test.py

This will start a Redpanda broker using Docker and execute the tests. The +output of the command is:

(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest
============================== test session starts ===============================
platform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0
rootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py
plugins: asyncio-0.21.0, anyio-3.6.2
asyncio: mode=strict
collected 1 item

test.py . [100%]

=============================== 1 passed in 7.28s ================================
(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$

Running the tests with the Redpanda broker ensures that your code is +working correctly with a real Kafka-like message broker, making your +tests more reliable.

Recap

We have created an Iris classification model and encapsulated it into our +FastKafka +application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our +Tester +class with Redpanda broker which mirrors the developed app topics +for testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

+ + + + \ No newline at end of file diff --git a/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html b/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html new file mode 100644 index 0000000..cc069f2 --- /dev/null +++ b/docs/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html @@ -0,0 +1,78 @@ + + + + + +Using FastAPI to Run FastKafka Application | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

Using FastAPI to Run FastKafka Application

When deploying a FastKafka application, the default approach is to +utilize the fastkafka run CLI +command. This command allows you to launch your FastKafka application as +a standalone service. However, if you already have a FastAPI application +in place and wish to run FastKafka application alongside it, you have an +alternative option.

FastKafka provides a method called +FastKafka.fastapi_lifespan +that leverages FastAPI’s +lifespan +feature. This method allows you to run your FastKafka application +together with your existing FastAPI app, seamlessly integrating their +functionalities. By using the +FastKafka.fastapi_lifespan +method, you can start the FastKafka application within the same process +as the FastAPI app.

The +FastKafka.fastapi_lifespan +method ensures that both FastAPI and FastKafka are initialized and start +working simultaneously. This approach enables the execution of +Kafka-related tasks, such as producing and consuming messages, while +also handling HTTP requests through FastAPI’s routes.

By combining FastAPI and FastKafka in this manner, you can build a +comprehensive application that harnesses the power of both frameworks. +Whether you require real-time messaging capabilities or traditional HTTP +endpoints, this approach allows you to leverage the strengths of FastAPI +and FastKafka within a single deployment setup.

Prerequisites

  1. A basic knowledge of +FastKafka +is needed to proceed with this guide. If you are not familiar with +FastKafka, +please go through the tutorial first.
  2. FastKafka +and FastAPI libraries needs to be installed.

This guide will provide a step-by-step explanation, taking you through +each stage individually, before combining all the components in the +final section for a comprehensive understanding of the process.

1. Basic FastKafka app

In this step, we will begin by creating a simple FastKafka application.

from pydantic import BaseModel, Field, NonNegativeFloat
from typing import *

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Greetings",
kafka_brokers=kafka_brokers,
)


class TestMsg(BaseModel):
msg: str = Field(...)


@kafka_app.consumes()
async def on_names(msg: TestMsg):
await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))


@kafka_app.produces()
async def to_greetings(greeting: TestMsg) -> TestMsg:
return greeting

In the above example, we consume messages from a topic called names, +we prepend “Hello" to the message, and send it back to another topic +called greetings.

We now have a simple +FastKafka +app to produce and consume from two topics.

2. Using fastapi_lifespan method

In this step of the guide, we will explore the integration of a +FastKafka application with a FastAPI application using the +FastKafka.fastapi_lifespan +method. The +FastKafka.fastapi_lifespan +method is a feature provided by FastKafka, which allows you to +seamlessly integrate a FastKafka application with a FastAPI application +by leveraging FastAPI’s lifespan feature.

from fastapi import FastAPI

fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name="localhost"))


@fastapi_app.get("/hello")
async def hello():
return {"msg": "hello there"}

In the above example, a new instance of the FastAPI app is created, +and when the app is started using uvicorn, it also runs the +FastKafka +application concurrently.

Putting it all together

Let’s put the above code together and write it in a file called +fast_apps.py.

# content of the "fast_apps.py" file

from pydantic import BaseModel, Field, NonNegativeFloat
from typing import *

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Greetings",
kafka_brokers=kafka_brokers,
)


class TestMsg(BaseModel):
msg: str = Field(...)


@kafka_app.consumes()
async def on_names(msg: TestMsg):
await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))


@kafka_app.produces()
async def to_greetings(greeting: TestMsg) -> TestMsg:
return greeting


from fastapi import FastAPI

fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan("localhost"))

@fastapi_app.get("/hello")
async def hello():
return {"msg": "hello there"}

Finally, you can run the FastAPI application using a web server of your +choice, such as Uvicorn or Hypercorn by running the below command:

uvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080
+ + + + \ No newline at end of file diff --git a/docs/index.html b/docs/index.html new file mode 100644 index 0000000..99da345 --- /dev/null +++ b/docs/index.html @@ -0,0 +1,121 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: 0.8.0

FastKafka

Effortless Kafka integration for your web services

PyPI PyPI -
+Downloads PyPI - Python
+Version

GitHub Workflow
+Status +CodeQL +Dependency
+Review

GitHub


FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.


⭐⭐⭐ Stay in touch ⭐⭐⭐

Please show your support and stay in touch by:

Your support helps us to stay in touch with you and encourages us to +continue developing and improving the library. Thank you for your +support!


🐝🐝🐝 We were busy lately 🐝🐝🐝

Activity

Install

FastKafka works on Windows, macOS, Linux, and most Unix-style operating +systems. You can install base version of FastKafka with pip as usual:

pip install fastkafka

To install FastKafka with testing features please use:

pip install fastkafka[test]

To install FastKafka with asyncapi docs please use:

pip install fastkafka[docs]

To install FastKafka with all the features please use:

pip install fastkafka[test,docs]

Tutorial

You can start an interactive tutorial in Google Colab by clicking the +button below:

Open in Colab

Writing server code

To demonstrate FastKafka simplicity of using @produces and @consumes +decorators, we will focus on a simple app.

The app will consume JSON messages containing positive floats from one topic, log +them and then produce incremented values to another topic.

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines one Data message class. This class will model the +consumed and produced data in our app demo; it contains one +NonNegativeFloat field data that will be logged and “processed” +before being produced to another topic.

This message class will be used to parse and validate incoming data in +Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class Data(BaseModel):
data: NonNegativeFloat = Field(
..., example=0.5, description="Float data example"
)

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +both generating the documentation and later to run the actual server +against one of the given kafka broker.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation

We will also import and create a logger so that we can log the incoming +data in our consuming function.

from logging import getLogger
from fastkafka import FastKafka

logger = getLogger("Demo Kafka app")

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the Data message class. Specifying the type of the +single argument is instructing the Pydantic to use Data.parse_raw() +on the consumed message before passing it to the user defined function +on_input_data.

  • The @produces decorator is applied to the to_output_data function, +which specifies that this function should produce a message to the +“output_data" Kafka topic whenever it is called. The to_output_data +function takes a single float argument data. It increments the +data and returns it wrapped in a Data object. The framework will call +the Data.json().encode("utf-8") function on the returned value and +produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: Data):
logger.info(f"Got data: {msg.data}")
await to_output_data(msg.data)


@kafka_app.produces(topic="output_data")
async def to_output_data(data: float) -> Data:
processed_data = Data(data=data+1.0)
return processed_data

Testing the service

The service can be tested using the +Tester +instances which internally starts InMemory implementation of Kafka +broker.

The Tester will redirect your consumes and produces decorated functions +to the InMemory Kafka broker so that you can quickly test your app +without the need for a running Kafka broker and all its dependencies.

from fastkafka.testing import Tester

msg = Data(
data=0.1,
)

# Start Tester app and create InMemory Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send Data message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with incremented data in output_data topic
await tester.awaited_mocks.on_output_data.assert_awaited_with(
Data(data=1.1), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] Demo Kafka app: Got data: 0.1
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created a simple FastKafka application. The app will consume the +Data from the input_data topic, log it and produce the incremented +data to output_data topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent Data message to input_data topic

  4. Asserted and checked that the developed service has reacted to Data +message

Running the service

The service can be started using the built-in fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class Data(BaseModel):
data: NonNegativeFloat = Field(
..., example=0.5, description="Float data example"
)

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: Data):
logger.info(f"Got data: {msg.data}")
await to_output_data(msg.data)


@kafka_app.produces(topic="output_data")
async def to_output_data(data: float) -> Data:
processed_data = Data(data=data+1.0)
return processed_data

To run the service, use the FastKafka CLI command and pass the module +(in this case, the file where the app implementation is located) and the +app symbol to the command.

fastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app

After running the command, you should see the following output in your +command line:

[1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1506]: 23-05-31 11:36:46.042 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata
[1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)
[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]
[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)
[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]
[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...
23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.
23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

AsyncApi requires Node.js to be installed and we provide the following +convenience command line for it:

fastkafka docs install_deps
23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.
23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'
23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /content/asyncapi/docs.

This will generate the asyncapi folder in relative path where all your +documentation will be saved. You can check out the content of it with:

ls -l asyncapi
total 8
drwxr-xr-x 4 root root 4096 May 31 11:38 docs
drwxr-xr-x 2 root root 4096 May 31 11:38 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find a asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'
23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /content/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
127.0.0.1 - - [31/May/2023 11:39:14] "GET / HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/global.min.css HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /js/asyncapi-ui.min.js HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/asyncapi.min.css HTTP/1.1" 200 -
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

Kafka_servers

The following documentation snippet is for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippet is for the producer as specified in +the code above:

Kafka_producer

Finally, all messages defined as subclasses of BaseModel are +documented as well:

![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)

License

FastKafka is licensed under the Apache License 2.0

A permissive license whose main conditions require preservation of +copyright and license notices. Contributors provide an express grant of +patent rights. Licensed works, modifications, and larger works may be +distributed under different terms and without source code.

The full text of the license can be found +here.

+ + + + \ No newline at end of file diff --git a/docs/next/CHANGELOG/index.html b/docs/next/CHANGELOG/index.html new file mode 100644 index 0000000..c58e9f9 --- /dev/null +++ b/docs/next/CHANGELOG/index.html @@ -0,0 +1,33 @@ + + + + + +Release notes | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Release notes

0.8.0

New Features

  • Add support for Pydantic v2 (#408), thanks to @kumaranvpl
    • FastKafka now uses Pydantic v2 for serialization/deserialization of messages
  • Enable nbdev_test on windows and run CI tests on windows (#356), thanks to @kumaranvpl

Bugs Squashed

  • Fix ´fastkafka testing install deps´ failing (#385), thanks to @Sternakt

  • Create asyncapi docs directory only while building asyncapi docs (#368), thanks to @kumaranvpl

  • Add retries to producer in case of raised KafkaTimeoutError exception (#423), thanks to @Sternakt

0.7.1

Bugs Squashed

  • Limit pydantic version to <2.0 (#427)

  • Fix Kafka broker version installation issues (#427)

  • Fix ApacheKafkaBroker startup issues (#427)

0.7.0

New Features

  • Optional description argument to consumes and produces decorator implemented (#338), thanks to @Sternakt

    • Consumes and produces decorators now have optional description argument that is used instead of function docstring in async doc generation when specified
  • FastKafka Windows OS support enabled (#326), thanks to @kumaranvpl

    • FastKafka can now run on Windows
  • FastKafka and FastAPI integration implemented (#304), thanks to @kumaranvpl

    • FastKafka can now be run alongside FastAPI
  • Batch consuming option to consumers implemented (#298), thanks to @Sternakt

    • Consumers can consume events in batches by specifying msg type of consuming function as List[YourMsgType]
  • Removed support for synchronous produce functions (#295), thanks to @kumaranvpl

  • Added default broker values and update docs (#292), thanks to @Sternakt

Bugs Squashed

  • Fix index.ipynb to be runnable in colab (#342)

  • Use cli option root_path docs generate and serve CLI commands (#341), thanks to @kumaranvpl

  • Fix incorrect asyncapi docs path on fastkafka docs serve command (#335), thanks to @Sternakt

    • Serve docs now takes app root_path argument into consideration when specified in app
  • Fix typo (supress_timestamps->suppress_timestamps) and remove fix for enabling timestamps (#315)

  • Fix logs printing timestamps (#308)

  • Fix topics with dots causing failure of tester instantiation (#306), thanks to @Sternakt

    • Specified topics can now have "." in their names

0.6.0

New Features

  • Timestamps added to CLI commands (#283), thanks to @davorrunje

  • Added option to process messages concurrently (#278), thanks to @Sternakt

    • A new executor option is added that supports either sequential processing for tasks with small latencies or concurrent processing for tasks with larger latencies.
  • Add consumes and produces functions to app (#274), thanks to @Sternakt

  • Export encoders, decoders from fastkafka.encoder (#246), thanks to @kumaranvpl
  • Create a Github action file to automatically index the website and commit it to the FastKafkachat repository. (#239)
  • UI Improvement: Post screenshots with links to the actual messages in testimonials section (#228)

Bugs Squashed

  • Batch testing fix (#280), thanks to @Sternakt

  • Tester breaks when using Batching or KafkaEvent producers (#279)

  • Consumer loop callbacks are not executing in parallel (#276)

0.5.0

New Features

  • Significant speedup of Kafka producer (#236), thanks to @Sternakt

Bugs Squashed

0.4.0

New Features

0.3.1

  • README.md file updated

0.3.0

New Features

  • Guide for FastKafka produces using partition key (#172), thanks to @Sternakt

    • Closes #161
  • Add support for Redpanda for testing and deployment (#181), thanks to @kumaranvpl

  • Remove bootstrap_servers from init and use the name of broker as an option when running/testing (#134)

  • Add a GH action file to check for broken links in the docs (#163)

  • Optimize requirements for testing and docs (#151)

  • Break requirements into base and optional for testing and dev (#124)

    • Minimize base requirements needed just for running the service.
  • Add link to example git repo into guide for building docs using actions (#81)

  • Add logging for run_in_background (#46)

  • Implement partition Key mechanism for producers (#16)

Bugs Squashed

  • Implement checks for npm installation and version (#176), thanks to @Sternakt

    • Closes #158 by checking if the npx is installed and more verbose error handling
  • Fix the helper.py link in CHANGELOG.md (#165)

  • fastkafka docs install_deps fails (#157)

    • Unexpected internal error: [Errno 2] No such file or directory: 'npx'
  • Broken links in docs (#141)

  • fastkafka run is not showing up in CLI docs (#132)

0.2.3

  • Fixed broken links on PyPi index page

0.2.2

New Features

  • Extract JDK and Kafka installation out of LocalKafkaBroker (#131)

  • PyYAML version relaxed (#119), thanks to @davorrunje

  • Replace docker based kafka with local (#68)

    • replace docker compose with a simple docker run (standard run_jupyter.sh should do)
    • replace all tests to use LocalKafkaBroker
    • update documentation

Bugs Squashed

  • Fix broken link for FastKafka docs in index notebook (#145)

  • Fix encoding issues when loading setup.py on windows OS (#135)

0.2.0

New Features

  • Replace kafka container with LocalKafkaBroker (#112)
      • Replace kafka container with LocalKafkaBroker in tests
  • Remove kafka container from tests environment
  • Fix failing tests

Bugs Squashed

  • Fix random failing in CI (#109)

0.1.3

  • version update in init.py

0.1.2

New Features

  • Git workflow action for publishing Kafka docs (#78)

Bugs Squashed

  • Include missing requirement (#110)
    • Typer is imported in this file but it is not included in settings.ini
    • Add aiohttp which is imported in this file
    • Add nbformat which is imported in _components/helpers.py
    • Add nbconvert which is imported in _components/helpers.py

0.1.1

Bugs Squashed

  • JDK install fails on Python 3.8 (#106)

0.1.0

Initial release

+ + + + \ No newline at end of file diff --git a/docs/next/CONTRIBUTING/index.html b/docs/next/CONTRIBUTING/index.html new file mode 100644 index 0000000..df8ea25 --- /dev/null +++ b/docs/next/CONTRIBUTING/index.html @@ -0,0 +1,36 @@ + + + + + +Contributing to FastKafka | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Contributing to FastKafka

First off, thanks for taking the time to contribute! ❤️

All types of contributions are encouraged and valued. See the Table of Contents for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉

And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:

  • Star the project
  • Tweet about it
  • Refer this project in your project's readme
  • Mention the project at local meetups and tell your friends/colleagues

Table of Contents

I Have a Question

If you want to ask a question, we assume that you have read the available Documentation.

Before you ask a question, it is best to search for existing Issues that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue.

If you then still feel the need to ask a question and need clarification, we recommend the following:

  • Contact us on Discord
  • Open an Issue
    • Provide as much context as you can about what you're running into

We will then take care of the issue as soon as possible.

I Want To Contribute

When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.

Reporting Bugs

Before Submitting a Bug Report

A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.

  • Make sure that you are using the latest version.
  • Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the documentation. If you are looking for support, you might want to check this section).
  • To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the bug tracker.
  • Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.
  • Collect information about the bug:
    • Stack trace (Traceback)
    • OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
    • Python version
    • Possibly your input and the output
    • Can you reliably reproduce the issue? And can you also reproduce it with older versions?

How Do I Submit a Good Bug Report?

We use GitHub issues to track bugs and errors. If you run into an issue with the project:

  • Open an Issue. (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)
  • Explain the behavior you would expect and the actual behavior.
  • Please provide as much context as possible and describe the reproduction steps that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
  • Provide the information you collected in the previous section.

Once it's filed:

  • The project team will label the issue accordingly.
  • A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as needs-repro. Bugs with the needs-repro tag will not be addressed until they are reproduced.
  • If the team is able to reproduce the issue, it will be marked needs-fix, as well as possibly other tags (such as critical), and the issue will be left to be implemented.

Suggesting Enhancements

This section guides you through submitting an enhancement suggestion for FastKafka, including completely new features and minor improvements to existing functionality. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.

Before Submitting an Enhancement

  • Make sure that you are using the latest version.
  • Read the documentation carefully and find out if the functionality is already covered, maybe by an individual configuration.
  • Perform a search to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
  • Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library.
  • If you are not sure or would like to discuss the enhancement with us directly, you can always contact us on Discord

How Do I Submit a Good Enhancement Suggestion?

Enhancement suggestions are tracked as GitHub issues.

  • Use a clear and descriptive title for the issue to identify the suggestion.
  • Provide a step-by-step description of the suggested enhancement in as many details as possible.
  • Describe the current behavior and explain which behavior you expected to see instead and why. At this point you can also tell which alternatives do not work for you.
  • Explain why this enhancement would be useful to most FastKafka users. You may also want to point out the other projects that solved it better and which could serve as inspiration.

Your First Code Contribution

A great way to start contributing to FastKafka would be by solving an issue tagged with "good first issue". To find a list of issues that are tagged as "good first issue" and are suitable for newcomers, please visit the following link: Good first issues

These issues are beginner-friendly and provide a great opportunity to get started with contributing to FastKafka. Choose an issue that interests you, follow the contribution process mentioned in Way of working and Before a PR, and help us make FastKafka even better!

If you have any questions or need further assistance, feel free to reach out to us. Happy coding!

Development

Prepare the dev environment

To start contributing to FastKafka, you first have to prepare the development environment.

Clone the FastKafka repository

To clone the repository, run the following command in the CLI:

git clone https://github.com/airtai/fastkafka.git

Optional: create a virtual python environment

To prevent library version clashes with your other projects, it is recommended that you create a virtual python environment for your FastKafka project by running:

python3 -m venv fastkafka-env

And to activate your virtual environment run:

source fastkafka-env/bin/activate

To learn more about virtual environments, please have a look at official python documentation

Install FastKafka

To install FastKafka, navigate to the root directory of the cloned FastKafka project and run:

pip install -e ".[dev]"

Install JRE and Kafka toolkit

To be able to run tests and use all the functionalities of FastKafka, you have to have JRE and Kafka toolkit installed on your machine. To do this, you have two options:

  1. Use our fastkafka testing install-deps CLI command which will install JRE and Kafka toolkit for you in your .local folder +OR
  2. Install JRE and Kafka manually. +To do this, please refer to JDK and JRE installation guide and Apache Kafka quickstart

Install npm

To be able to run tests you must have npm installed, because of documentation generation. To do this, you have two options:

  1. Use our fastkafka docs install_deps CLI command which will install npm for you in your .local folder +OR
  2. Install npm manually. +To do this, please refer to NPM installation guide

Install docusaurus

To generate the documentation, you need docusaurus. To install it run 'docusaurus/scripts/install_docusaurus_deps.sh' in the root of FastKafka project.

Check if everything works

After installing FastKafka and all the necessary dependencies, run nbdev_test in the root of FastKafka project. This will take a couple of minutes as it will run all the tests on FastKafka project. If everything is setup correctly, you will get a "Success." message in your terminal, otherwise please refer to previous steps.

Way of working

The development of FastKafka is done in Jupyter notebooks. Inside the nbs directory you will find all the source code of FastKafka, this is where you will implement your changes.

The testing, cleanup and exporting of the code is being handled by nbdev, please, before starting the work on FastKafka, get familiar with it by reading nbdev documentation.

The general philosophy you should follow when writing code for FastKafka is:

  • Function should be an atomic functionality, short and concise
    • Good rule of thumb: your function should be 5-10 lines long usually
  • If there are more than 2 params, enforce keywording using *
    • E.g.: def function(param1, *, param2, param3): ...
  • Define typing of arguments and return value
    • If not, mypy tests will fail and a lot of easily avoidable bugs will go undetected
  • After the function cell, write test cells using the assert keyword
    • Whenever you implement something you should test that functionality immediately in the cells below
  • Add Google style python docstrings when function is implemented and tested

Before a PR

After you have implemented your changes you will want to open a pull request to merge those changes into our main branch. To make this as smooth for you and us, please do the following before opening the request (all the commands are to be run in the root of FastKafka project):

  1. Format your notebooks: nbqa black nbs
  2. Close, shutdown, and clean the metadata from your notebooks: nbdev_clean
  3. Export your code: nbdev_export
  4. Run the tests: nbdev_test
  5. Test code typing: mypy fastkafka
  6. Test code safety with bandit: bandit -r fastkafka
  7. Test code safety with semgrep: semgrep --config auto -r fastkafka

When you have done this, and all the tests are passing, your code should be ready for a merge. Please commit and push your code and open a pull request and assign it to one of the core developers. We will then review your changes and if everything is in order, we will approve your merge.

Attribution

This guide is based on the contributing-gen. Make your own!

+ + + + \ No newline at end of file diff --git a/docs/next/LICENSE/index.html b/docs/next/LICENSE/index.html new file mode 100644 index 0000000..92b217c --- /dev/null +++ b/docs/next/LICENSE/index.html @@ -0,0 +1,168 @@ + + + + + +LICENSE | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

LICENSE

Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

  1. Definitions.

    "License" shall mean the terms and conditions for use, reproduction, +and distribution as defined by Sections 1 through 9 of this document.

    "Licensor" shall mean the copyright owner or entity authorized by +the copyright owner that is granting the License.

    "Legal Entity" shall mean the union of the acting entity and all +other entities that control, are controlled by, or are under common +control with that entity. For the purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity.

    "You" (or "Your") shall mean an individual or Legal Entity +exercising permissions granted by this License.

    "Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation +source, and configuration files.

    "Object" form shall mean any form resulting from mechanical +transformation or translation of a Source form, including but +not limited to compiled object code, generated documentation, +and conversions to other media types.

    "Work" shall mean the work of authorship, whether in Source or +Object form, made available under the License, as indicated by a +copyright notice that is included in or attached to the work +(an example is provided in the Appendix below).

    "Derivative Works" shall mean any work, whether in Source or Object +form, that is based on (or derived from) the Work and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. For the purposes +of this License, Derivative Works shall not include works that remain +separable from, or merely link (or bind by name) to the interfaces of, +the Work and Derivative Works thereof.

    "Contribution" shall mean any work of authorship, including +the original version of the Work and any modifications or additions +to that Work or Derivative Works thereof, that is intentionally +submitted to Licensor for inclusion in the Work by the copyright owner +or by an individual or Legal Entity authorized to submit on behalf of +the copyright owner. For the purposes of this definition, "submitted" +means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, +and issue tracking systems that are managed by, or on behalf of, the +Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise +designated in writing by the copyright owner as "Not a Contribution."

    "Contributor" shall mean Licensor and any individual or Legal Entity +on behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work.

  2. Grant of Copyright License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the +Work and such Derivative Works in Source or Object form.

  3. Grant of Patent License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, +use, offer to sell, sell, import, and otherwise transfer the Work, +where such license applies only to those patent claims licensable +by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) +with the Work to which such Contribution(s) was submitted. If You +institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work +or a Contribution incorporated within the Work constitutes direct +or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate +as of the date such litigation is filed.

  4. Redistribution. You may reproduce and distribute copies of the +Work or Derivative Works thereof in any medium, with or without +modifications, and in Source or Object form, provided that You +meet the following conditions:

    (a) You must give any other recipients of the Work or +Derivative Works a copy of this License; and

    (b) You must cause any modified files to carry prominent notices +stating that You changed the files; and

    (c) You must retain, in the Source form of any Derivative Works +that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, +excluding those notices that do not pertain to any part of +the Derivative Works; and

    (d) If the Work includes a "NOTICE" text file as part of its +distribution, then any Derivative Works that You distribute must +include a readable copy of the attribution notices contained +within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one +of the following places: within a NOTICE text file distributed +as part of the Derivative Works; within the Source form or +documentation, if provided along with the Derivative Works; or, +within a display generated by the Derivative Works, if and +wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and +do not modify the License. You may add Your own attribution +notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided +that such additional attribution notices cannot be construed +as modifying the License.

    You may add Your own copyright statement to Your modifications and +may provide additional or different license terms and conditions +for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, +reproduction, and distribution of the Work otherwise complies with +the conditions stated in this License.

  5. Submission of Contributions. Unless You explicitly state otherwise, +any Contribution intentionally submitted for inclusion in the Work +by You to the Licensor shall be under the terms and conditions of +this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify +the terms of any separate license agreement you may have executed +with Licensor regarding such Contributions.

  6. Trademarks. This License does not grant permission to use the trade +names, trademarks, service marks, or product names of the Licensor, +except as required for reasonable and customary use in describing the +origin of the Work and reproducing the content of the NOTICE file.

  7. Disclaimer of Warranty. Unless required by applicable law or +agreed to in writing, Licensor provides the Work (and each +Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions +of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any +risks associated with Your exercise of permissions under this License.

  8. Limitation of Liability. In no event and under no legal theory, +whether in tort (including negligence), contract, or otherwise, +unless required by applicable law (such as deliberate and grossly +negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a +result of this License or out of the use or inability to use the +Work (including but not limited to damages for loss of goodwill, +work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses), even if such Contributor +has been advised of the possibility of such damages.

  9. Accepting Warranty or Additional Liability. While redistributing +the Work or Derivative Works thereof, You may choose to offer, +and charge a fee for, acceptance of support, warranty, indemnity, +or other liability obligations and/or rights consistent with this +License. However, in accepting such obligations, You may act only +on Your own behalf and on Your sole responsibility, not on behalf +of any other Contributor, and only if You agree to indemnify, +defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason +of your accepting any such warranty or additional liability.

    END OF TERMS AND CONDITIONS

    APPENDIX: How to apply the Apache License to your work.

    To apply the Apache License to your work, attach the following +boilerplate notice, with the fields enclosed by brackets "[]" +replaced with your own identifying information. (Don't include +the brackets!) The text should be enclosed in the appropriate +comment syntax for the file format. We also recommend that a +file or class name and description of purpose be included on the +same "printed page" as the copyright notice for easier +identification within third-party archives.

    Copyright [yyyy] [name of copyright owner]

    Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.

+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/EventMetadata/index.html b/docs/next/api/fastkafka/EventMetadata/index.html new file mode 100644 index 0000000..3b5e4a5 --- /dev/null +++ b/docs/next/api/fastkafka/EventMetadata/index.html @@ -0,0 +1,32 @@ + + + + + +EventMetadata | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

EventMetadata

fastkafka.EventMetadata

View source

A class for encapsulating Kafka record metadata.

Parameters:

NameTypeDescriptionDefault
topicstrThe topic this record is received fromrequired
partitionintThe partition from which this record is receivedrequired
offsetintThe position of this record in the corresponding Kafka partitionrequired
timestampintThe timestamp of this recordrequired
timestamp_typeintThe timestamp type of this recordrequired
keyOptional[bytes]The key (or None if no key is specified)required
valueOptional[bytes]The valuerequired
serialized_key_sizeintThe size of the serialized, uncompressed key in bytesrequired
serialized_value_sizeintThe size of the serialized, uncompressed value in bytesrequired
headersSequence[Tuple[str, bytes]]The headersrequired

init

__init__(
self,
topic,
partition,
offset,
timestamp,
timestamp_type,
key,
value,
checksum,
serialized_key_size,
serialized_value_size,
headers,
)

create_event_metadata

View source
@staticmethod
create_event_metadata(
record
)

Creates an instance of EventMetadata from a ConsumerRecord.

Parameters:

NameTypeDescriptionDefault
recordConsumerRecordThe Kafka ConsumerRecord.required

Returns:

TypeDescription
EventMetadataThe created EventMetadata instance.
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/KafkaEvent/index.html b/docs/next/api/fastkafka/KafkaEvent/index.html new file mode 100644 index 0000000..df0b3e5 --- /dev/null +++ b/docs/next/api/fastkafka/KafkaEvent/index.html @@ -0,0 +1,32 @@ + + + + + +KafkaEvent | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

KafkaEvent

fastkafka.KafkaEvent

View source

A generic class for representing Kafka events. Based on BaseSubmodel, bound to pydantic.BaseModel

Parameters:

NameTypeDescriptionDefault
messageBaseSubmodelThe message contained in the Kafka event, can be of type pydantic.BaseModel.required
keyOptional[bytes]The optional key used to identify the Kafka event.None

init

__init__(
self, message, key=None
)
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/encoder/AvroBase/index.html b/docs/next/api/fastkafka/encoder/AvroBase/index.html new file mode 100644 index 0000000..710ac40 --- /dev/null +++ b/docs/next/api/fastkafka/encoder/AvroBase/index.html @@ -0,0 +1,38 @@ + + + + + +AvroBase | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

AvroBase

fastkafka.encoder.AvroBase

View source

This is a base pydantic class that adds some methods

init

__init__(
__pydantic_self__, data
)

Create a new model by parsing and validating input data from keyword arguments.

Raises [ValidationError][pydantic_core.ValidationError] if the input data cannot be +validated to form a valid model.

__init__ uses __pydantic_self__ instead of the more common self for the first arg to +allow self as a field name.

avro_schema

View source
@classmethod
avro_schema(
by_alias=True, namespace=None
)

Returns the Avro schema for the Pydantic class.

Parameters:

NameTypeDescriptionDefault
by_aliasboolGenerate schemas using aliases defined. Defaults to True.True
namespaceOptional[str]Optional namespace string for schema generation.None

Returns:

TypeDescription
Dict[str, Any]The Avro schema for the model.

avro_schema_for_pydantic_class

View source
@classmethod
avro_schema_for_pydantic_class(
pydantic_model, by_alias=True, namespace=None
)

Returns the Avro schema for the given Pydantic class.

Parameters:

NameTypeDescriptionDefault
pydantic_modelType[pydantic.main.BaseModel]The Pydantic class.required
by_aliasboolGenerate schemas using aliases defined. Defaults to True.True
namespaceOptional[str]Optional namespace string for schema generation.None

Returns:

TypeDescription
Dict[str, Any]The Avro schema for the model.

avro_schema_for_pydantic_object

View source
@classmethod
avro_schema_for_pydantic_object(
pydantic_model, by_alias=True, namespace=None
)

Returns the Avro schema for the given Pydantic object.

Parameters:

NameTypeDescriptionDefault
pydantic_modelBaseModelThe Pydantic object.required
by_aliasboolGenerate schemas using aliases defined. Defaults to True.True
namespaceOptional[str]Optional namespace string for schema generation.None

Returns:

TypeDescription
Dict[str, Any]The Avro schema for the model.

construct

@classmethod
construct(
_fields_set=None, values
)

copy

copy(
self, include=None, exclude=None, update=None, deep=False
)

Returns a copy of the model.

!!! warning "Deprecated" +This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)
data = {**data, **(update or {})}
copied = self.model_validate(data)

Parameters:

NameTypeDescriptionDefault
includeAbstractSetIntStrMappingIntStrAnyNone
excludeAbstractSetIntStrMappingIntStrAnyNone
update`Dict[str, Any]None`Optional dictionary of field-value pairs to override field valuesin the copied model.
deepboolIf True, the values of fields that are Pydantic models will be deep copied.False

Returns:

TypeDescription
ModelA copy of the model with included, excluded and updated fields as specified.

dict

dict(
self,
include=None,
exclude=None,
by_alias=False,
exclude_unset=False,
exclude_defaults=False,
exclude_none=False,
)

from_orm

@classmethod
from_orm(
obj
)

json

json(
self,
include=None,
exclude=None,
by_alias=False,
exclude_unset=False,
exclude_defaults=False,
exclude_none=False,
encoder=PydanticUndefined,
models_as_dict=PydanticUndefined,
dumps_kwargs,
)

model_computed_fields

@property
model_computed_fields(
self
)

Get the computed fields of this model instance.

Returns:

TypeDescription
dict[str, ComputedFieldInfo]A dictionary of computed field names and their corresponding ComputedFieldInfo objects.

model_construct

@classmethod
model_construct(
_fields_set=None, values
)

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed. +Behaves as if Config.extra = 'allow' was set since it adds all passed values

Parameters:

NameTypeDescriptionDefault
_fields_setset[str]NoneThe set of field names accepted for the Model instance.
valuesAnyTrusted or pre-validated data dictionary.required

Returns:

TypeDescription
ModelA new instance of the Model class with validated data.

model_copy

model_copy(
self, update=None, deep=False
)

Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#model_copy

Returns a copy of the model.

Parameters:

NameTypeDescriptionDefault
updatedict[str, Any]NoneValues to change/add in the new model. Note: the data is not validatedbefore creating the new model. You should trust this data.
deepboolSet to True to make a deep copy of the model.False

Returns:

TypeDescription
ModelNew model instance.

model_dump

model_dump(
self,
mode='python',
include=None,
exclude=None,
by_alias=False,
exclude_unset=False,
exclude_defaults=False,
exclude_none=False,
round_trip=False,
warnings=True,
)

Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

NameTypeDescriptionDefault
modeLiteral['json', 'python']strThe mode in which to_python should run.If mode is 'json', the dictionary will only contain JSON serializable types.If mode is 'python', the dictionary may contain any Python objects.
includeIncExA list of fields to include in the output.None
excludeIncExA list of fields to exclude from the output.None
by_aliasboolWhether to use the field's alias in the dictionary key if defined.False
exclude_unsetboolWhether to exclude fields that are unset or None from the output.False
exclude_defaultsboolWhether to exclude fields that are set to their default value from the output.False
exclude_noneboolWhether to exclude fields that have a value of None from the output.False
round_tripboolWhether to enable serialization and deserialization round-trip support.False
warningsboolWhether to log warnings when invalid fields are encountered.True

Returns:

TypeDescription
dict[str, Any]A dictionary representation of the model.

model_dump_json

model_dump_json(
self,
indent=None,
include=None,
exclude=None,
by_alias=False,
exclude_unset=False,
exclude_defaults=False,
exclude_none=False,
round_trip=False,
warnings=True,
)

Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

NameTypeDescriptionDefault
indentintNoneIndentation to use in the JSON output. If None is passed, the output will be compact.
includeIncExField(s) to include in the JSON output. Can take either a string or set of strings.None
excludeIncExField(s) to exclude from the JSON output. Can take either a string or set of strings.None
by_aliasboolWhether to serialize using field aliases.False
exclude_unsetboolWhether to exclude fields that have not been explicitly set.False
exclude_defaultsboolWhether to exclude fields that have the default value.False
exclude_noneboolWhether to exclude fields that have a value of None.False
round_tripboolWhether to use serialization/deserialization between JSON and class instance.False
warningsboolWhether to show any warnings that occurred during serialization.True

Returns:

TypeDescription
strA JSON string representation of the model.

model_extra

@property
model_extra(
self
)

Get extra fields set during validation.

Returns:

TypeDescription
`dict[str, Any]None`

model_fields_set

@property
model_fields_set(
self
)

Returns the set of fields that have been set on this model instance.

Returns:

TypeDescription
set[str]A set of strings representing the fields that have been set,i.e. that were not filled from defaults.

model_json_schema

@classmethod
model_json_schema(
by_alias=True,
ref_template='#/$defs/{model}',
schema_generator=<class 'pydantic.json_schema.GenerateJsonSchema'>,
mode='validation',
)

Generates a JSON schema for a model class.

Parameters:

NameTypeDescriptionDefault
by_aliasboolWhether to use attribute aliases or not.True
ref_templatestrThe reference template.'#/$defs/{model}'
schema_generatortype[GenerateJsonSchema]To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications<class 'pydantic.json_schema.GenerateJsonSchema'>
modeJsonSchemaModeThe mode in which to generate the schema.'validation'

Returns:

TypeDescription
dict[str, Any]The JSON schema for the given model class.

model_parametrized_name

@classmethod
model_parametrized_name(
params
)

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

NameTypeDescriptionDefault
paramstuple[type[Any], ...]Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int],the value (str, int) would be passed to params.required

Returns:

TypeDescription
strString representing the new class where params are passed to cls as type variables.

Exceptions:

TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.

model_post_init

model_post_init(
self, _BaseModel__context
)

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

model_rebuild

@classmethod
model_rebuild(
force=False,
raise_errors=True,
_parent_namespace_depth=2,
_types_namespace=None,
)

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

NameTypeDescriptionDefault
forceboolWhether to force the rebuilding of the model schema, defaults to False.False
raise_errorsboolWhether to raise errors, defaults to True.True
_parent_namespace_depthintThe depth level of the parent namespace, defaults to 2.2
_types_namespacedict[str, Any]NoneThe types namespace, defaults to None.

Returns:

TypeDescription
`boolNone`

model_validate

@classmethod
model_validate(
obj, strict=None, from_attributes=None, context=None
)

Validate a pydantic model instance.

Parameters:

NameTypeDescriptionDefault
objAnyThe object to validate.required
strictboolNoneWhether to raise an exception on invalid fields.
from_attributesboolNoneWhether to extract data from object attributes.
contextdict[str, Any]NoneAdditional context to pass to the validator.

Returns:

TypeDescription
ModelThe validated model instance.

Exceptions:

TypeDescription
ValidationErrorIf the object could not be validated.

model_validate_json

@classmethod
model_validate_json(
json_data, strict=None, context=None
)

Validate the given JSON data against the Pydantic model.

Parameters:

NameTypeDescriptionDefault
json_datastrbytesbytearray
strictboolNoneWhether to enforce types strictly.
contextdict[str, Any]NoneExtra variables to pass to the validator.

Returns:

TypeDescription
ModelThe validated Pydantic model.

Exceptions:

TypeDescription
ValueErrorIf json_data is not a JSON string.

parse_file

@classmethod
parse_file(
path,
content_type=None,
encoding='utf8',
proto=None,
allow_pickle=False,
)

parse_obj

@classmethod
parse_obj(
obj
)

parse_raw

@classmethod
parse_raw(
b,
content_type=None,
encoding='utf8',
proto=None,
allow_pickle=False,
)

schema

@classmethod
schema(
by_alias=True, ref_template='#/$defs/{model}'
)

schema_json

@classmethod
schema_json(
by_alias=True, ref_template='#/$defs/{model}', dumps_kwargs
)

update_forward_refs

@classmethod
update_forward_refs(
localns
)

validate

@classmethod
validate(
value
)
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/encoder/avro_decoder/index.html b/docs/next/api/fastkafka/encoder/avro_decoder/index.html new file mode 100644 index 0000000..f013f73 --- /dev/null +++ b/docs/next/api/fastkafka/encoder/avro_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_decoder | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

avro_decoder

avro_decoder

View source
avro_decoder(
raw_msg, cls
)

Decoder to decode avro encoded messages to pydantic model instance

Parameters:

NameTypeDescriptionDefault
raw_msgbytesAvro encoded bytes message received from Kafka topicrequired
clsType[pydantic.main.BaseModel]Pydantic class; This pydantic class will be used to construct instance of same classrequired

Returns:

TypeDescription
AnyAn instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/encoder/avro_encoder/index.html b/docs/next/api/fastkafka/encoder/avro_encoder/index.html new file mode 100644 index 0000000..9522a95 --- /dev/null +++ b/docs/next/api/fastkafka/encoder/avro_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +avro_encoder | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

avro_encoder

avro_encoder

View source
avro_encoder(
msg
)

Encoder to encode pydantic instances to avro message

Parameters:

NameTypeDescriptionDefault
msgBaseModelAn instance of pydantic basemodelrequired

Returns:

TypeDescription
bytesA bytes message which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/encoder/avsc_to_pydantic/index.html b/docs/next/api/fastkafka/encoder/avsc_to_pydantic/index.html new file mode 100644 index 0000000..3fd9cbb --- /dev/null +++ b/docs/next/api/fastkafka/encoder/avsc_to_pydantic/index.html @@ -0,0 +1,32 @@ + + + + + +avsc_to_pydantic | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

avsc_to_pydantic

avsc_to_pydantic

View source
avsc_to_pydantic(
schema
)

Generate pydantic model from given Avro Schema

Parameters:

NameTypeDescriptionDefault
schemaDict[str, Any]Avro schema in dictionary formatrequired

Returns:

TypeDescription
Type[pydantic.main.BaseModel]Pydantic model class built from given avro schema
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/encoder/json_decoder/index.html b/docs/next/api/fastkafka/encoder/json_decoder/index.html new file mode 100644 index 0000000..ba02457 --- /dev/null +++ b/docs/next/api/fastkafka/encoder/json_decoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_decoder | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

json_decoder

json_decoder

View source
json_decoder(
raw_msg, cls
)

Decoder to decode json string in bytes to pydantic model instance

Parameters:

NameTypeDescriptionDefault
raw_msgbytesBytes message received from Kafka topicrequired
clsType[pydantic.main.BaseModel]Pydantic class; This pydantic class will be used to construct instance of same classrequired

Returns:

TypeDescription
AnyAn instance of given pydantic class
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/encoder/json_encoder/index.html b/docs/next/api/fastkafka/encoder/json_encoder/index.html new file mode 100644 index 0000000..68a7595 --- /dev/null +++ b/docs/next/api/fastkafka/encoder/json_encoder/index.html @@ -0,0 +1,32 @@ + + + + + +json_encoder | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

json_encoder

json_encoder

View source
json_encoder(
msg
)

Encoder to encode pydantic instances to json string

Parameters:

NameTypeDescriptionDefault
msgBaseModelAn instance of pydantic basemodelrequired

Returns:

TypeDescription
bytesJson string in bytes which is encoded from pydantic basemodel
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/executors/DynamicTaskExecutor/index.html b/docs/next/api/fastkafka/executors/DynamicTaskExecutor/index.html new file mode 100644 index 0000000..849e4a7 --- /dev/null +++ b/docs/next/api/fastkafka/executors/DynamicTaskExecutor/index.html @@ -0,0 +1,33 @@ + + + + + +DynamicTaskExecutor | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

DynamicTaskExecutor

fastkafka.executors.DynamicTaskExecutor

View source

A class that implements a dynamic task executor for processing consumer records.

The DynamicTaskExecutor class extends the StreamExecutor class and provides functionality +for running tasks in parallel using asyncio.Task.

init

View source
__init__(
self, throw_exceptions=False, max_buffer_size=100000, size=100000
)

Create an instance of DynamicTaskExecutor

Parameters:

NameTypeDescriptionDefault
throw_exceptionsboolFlag indicating whether exceptions should be thrown or logged.Defaults to False.False
max_buffer_sizeintMaximum buffer size for the memory object stream.Defaults to 100_000.100000
sizeintSize of the task pool. Defaults to 100_000.100000

run

View source
run(
self, is_shutting_down_f, generator, processor
)

Runs the dynamic task executor.

Parameters:

NameTypeDescriptionDefault
is_shutting_down_fCallable[[], bool]Function to check if the executor is shutting down.required
generatorCallable[[], Awaitable[aiokafka.structs.ConsumerRecord]]Generator function for retrieving consumer records.required
processorCallable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]Processor function for processing consumer records.required
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/executors/SequentialExecutor/index.html b/docs/next/api/fastkafka/executors/SequentialExecutor/index.html new file mode 100644 index 0000000..d373a11 --- /dev/null +++ b/docs/next/api/fastkafka/executors/SequentialExecutor/index.html @@ -0,0 +1,33 @@ + + + + + +SequentialExecutor | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

SequentialExecutor

fastkafka.executors.SequentialExecutor

View source

A class that implements a sequential executor for processing consumer records.

The SequentialExecutor class extends the StreamExecutor class and provides functionality +for running processing tasks in sequence by awaiting their coroutines.

init

View source
__init__(
self, throw_exceptions=False, max_buffer_size=100000
)

Create an instance of SequentialExecutor

Parameters:

NameTypeDescriptionDefault
throw_exceptionsboolFlag indicating whether exceptions should be thrown or logged.Defaults to False.False
max_buffer_sizeintMaximum buffer size for the memory object stream.Defaults to 100_000.100000

run

View source
run(
self, is_shutting_down_f, generator, processor
)

Runs the sequential executor.

Parameters:

NameTypeDescriptionDefault
is_shutting_down_fCallable[[], bool]Function to check if the executor is shutting down.required
generatorCallable[[], Awaitable[aiokafka.structs.ConsumerRecord]]Generator function for retrieving consumer records.required
processorCallable[[aiokafka.structs.ConsumerRecord], Awaitable[NoneType]]Processor function for processing consumer records.required
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/index.html b/docs/next/api/fastkafka/index.html new file mode 100644 index 0000000..2b8e384 --- /dev/null +++ b/docs/next/api/fastkafka/index.html @@ -0,0 +1,39 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

FastKafka

fastkafka.FastKafka

View source

init

View source
__init__(
self,
title=None,
description=None,
version=None,
contact=None,
kafka_brokers=None,
root_path=None,
lifespan=None,
bootstrap_servers_id='localhost',
loop=None,
client_id=None,
metadata_max_age_ms=300000,
request_timeout_ms=40000,
api_version='auto',
acks=<object object at 0x7f21fc189d70>,
key_serializer=None,
value_serializer=None,
compression_type=None,
max_batch_size=16384,
partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>,
max_request_size=1048576,
linger_ms=0,
send_backoff_ms=100,
retry_backoff_ms=100,
security_protocol='PLAINTEXT',
ssl_context=None,
connections_max_idle_ms=540000,
enable_idempotence=False,
transactional_id=None,
transaction_timeout_ms=60000,
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
group_id=None,
key_deserializer=None,
value_deserializer=None,
fetch_max_wait_ms=500,
fetch_max_bytes=52428800,
fetch_min_bytes=1,
max_partition_fetch_bytes=1048576,
auto_offset_reset='latest',
enable_auto_commit=True,
auto_commit_interval_ms=5000,
check_crcs=True,
partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),
max_poll_interval_ms=300000,
rebalance_timeout_ms=None,
session_timeout_ms=10000,
heartbeat_interval_ms=3000,
consumer_timeout_ms=200,
max_poll_records=None,
exclude_internal_topics=True,
isolation_level='read_uncommitted',
)

Creates FastKafka application

Parameters:

NameTypeDescriptionDefault
titleOptional[str]optional title for the documentation. If None,the title will be set to empty stringNone
descriptionOptional[str]optional description for the documentation. IfNone, the description will be set to empty stringNone
versionOptional[str]optional version for the documentation. If None,the version will be set to empty stringNone
contactOptional[Dict[str, str]]optional contact for the documentation. If None, thecontact will be set to placeholder values:name='Author' url=HttpUrl(' https://www.google.com ', ) email='noreply@gmail.com'None
kafka_brokersOptional[Dict[str, Any]]dictionary describing kafka brokers used for setting the bootstrap server when running the application and for generating documentation. Defaults to { "localhost": { "url": "localhost", "description": "local kafka broker", "port": "9092", } }None
root_pathUnion[pathlib.Path, str, NoneType]path to where documentation will be createdNone
lifespanOptional[Callable[[ForwardRef('FastKafka')], AsyncContextManager[NoneType]]]asynccontextmanager that is used for setting lifespan hooks. aenter is called before app start and aexit after app stop. The lifespan is called when the application is started as an async context manager, e.g.: async with kafka_app...None
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: aiokafka-producer-# (appended with a unique numberper instance)None
key_serializerused to convert user-supplied keys to bytesIf not :data:None, called as f(key), should return:class:bytes.Default: :data:None.None
value_serializerused to convert user-supplied messagevalues to :class:bytes. If not :data:None, called asf(value), should return :class:bytes.Default: :data:None.None
acksone of 0, 1, all. The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common: 0: Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1. 1: The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* all: The broker leader will wait for the full set of in-sync replicas to acknowledge the record. This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to acks=1. If enable_idempotence is:data:True defaults to acks=all<object object at 0x7f21fc189d70>
compression_typeThe compression type for all data generated bythe producer. Valid values are gzip, snappy, lz4, zstdor :data:None.Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). Default: :data:None.None
max_batch_sizeMaximum size of buffered data per partition.After this amount :meth:send coroutine will block until batch isdrained.Default: 1638416384
linger_msThe producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than linger_ms, producer will wait linger_ms - process_time.Default: 0 (i.e. no delay).0
partitionerCallable used to determine which partitioneach message is assigned to. Called (after key serialization):partitioner(key_bytes, all_partitions, available_partitions).The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:None, the message is delivered to a random partition(filtered to partitions with available leaders only, if possible).<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>
max_request_sizeThe maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576.1048576
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
request_timeout_msProduce request timeout in milliseconds.As it's sent as part of:class:~kafka.protocol.produce.ProduceRequest (it's a blockingcall), maximum waiting time can be up to 2 *request_timeout_ms.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
api_versionspecify which kafka API version to use.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For moreinformation see :ref:ssl_auth.Default: :data:NoneNone
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying :data:None willdisable idle checks. Default: 540000 (9 minutes).540000
enable_idempotenceWhen set to :data:True, the producer will ensure that exactly one copy of each message is written in the stream. If :data:False, producer retries due to broker failures, etc., may write duplicates of the retried message in the stream. Note that enabling idempotence requires acks to be set to all. If it is not explicitly set by the user it will be chosen. If incompatible values are set, a :exc:ValueError will be thrown. New in version 0.5.0.False
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid valuesare: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: :data:NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: :data:NoneNone
group_idname of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: NoneNone
key_deserializerAny callable that takes araw message key and returns a deserialized key.None
value_deserializerAny callable that takes araw message value and returns a deserialized value.None
fetch_min_bytesMinimum amount of data the server shouldreturn for a fetch request, otherwise wait up tofetch_max_wait_ms for more data to accumulate. Default: 1.1
fetch_max_bytesThe maximum amount of data the server shouldreturn for a fetch request. This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).52428800
fetch_max_wait_msThe maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500.500
max_partition_fetch_bytesThe maximum amount of dataper-partition the server will return. The maximum total memoryused for a request = #partitions * max_partition_fetch_bytes.This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. Default: 1048576.1048576
max_poll_recordsThe maximum number of records returned in asingle call to :meth:.getmany. Defaults None, no limit.None
auto_offset_resetA policy for resetting offsets on:exc:.OffsetOutOfRangeError errors: earliest will move to the oldestavailable message, latest will move to the most recent, andnone will raise an exception so you can handle this case.Default: latest.'latest'
enable_auto_commitIf true the consumer's offset will beperiodically committed in the background. Default: True.True
auto_commit_interval_msmilliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000.5000
check_crcsAutomatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: TrueTrue
partition_assignment_strategyList of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. Default: [:class:.RoundRobinPartitionAssignor](<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)
max_poll_interval_msMaximum allowed time between calls toconsume messages (e.g., :meth:.getmany). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See KIP-62_ for moreinformation. Default 300000300000
rebalance_timeout_msThe maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to max.poll.interval.ms configuration, but as aiokafka will rejoin the group in the background, we decouple this setting to allow finer tuning by users that use :class:.ConsumerRebalanceListener to delay rebalancing. Defaults to session_timeout_msNone
session_timeout_msClient group session and failure detection timeout. The consumer sends periodic heartbeats (heartbeat.interval.ms) to indicate its liveness to the broker. If no heartbeats are received by the broker for a group member within the session timeout, the broker will remove the consumer from the group and trigger a rebalance. The allowed range is configured with the broker configuration properties group.min.session.timeout.ms and group.max.session.timeout.ms. Default: 1000010000
heartbeat_interval_msThe expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than session_timeout_ms, but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. Default: 30003000
consumer_timeout_msmaximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200200
exclude_internal_topicsWhether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: TrueTrue
isolation_levelControls how to read messages writtentransactionally.If set to read_committed, :meth:.getmany will only returntransactional messages which have been committed.If set to read_uncommitted (the default), :meth:.getmany willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, inread_committed mode, :meth:.getmany will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, read_committed consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in read_committed the seek_to_end method willreturn the LSO. See method docs below. Default: read_uncommitted'read_uncommitted'
sasl_oauth_token_providerOAuthBearer token provider instance. (See :mod:kafka.oauth.abstract).Default: NoneNone

benchmark

View source
benchmark(
self, interval=1, sliding_window_size=None
)

Decorator to benchmark produces/consumes functions

Parameters:

NameTypeDescriptionDefault
intervalUnion[int, datetime.timedelta]Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. default: 1 - one second1
sliding_window_sizeOptional[int]The size of the sliding window to use to calculateaverage throughput. default: None - By default average throughput isnot calculatedNone

consumes

View source
consumes(
self,
topic=None,
decoder='json',
executor=None,
brokers=None,
prefix='on_',
description=None,
loop=None,
bootstrap_servers='localhost',
client_id='aiokafka-0.8.1',
group_id=None,
key_deserializer=None,
value_deserializer=None,
fetch_max_wait_ms=500,
fetch_max_bytes=52428800,
fetch_min_bytes=1,
max_partition_fetch_bytes=1048576,
request_timeout_ms=40000,
retry_backoff_ms=100,
auto_offset_reset='latest',
enable_auto_commit=True,
auto_commit_interval_ms=5000,
check_crcs=True,
metadata_max_age_ms=300000,
partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),
max_poll_interval_ms=300000,
rebalance_timeout_ms=None,
session_timeout_ms=10000,
heartbeat_interval_ms=3000,
consumer_timeout_ms=200,
max_poll_records=None,
ssl_context=None,
security_protocol='PLAINTEXT',
api_version='auto',
exclude_internal_topics=True,
connections_max_idle_ms=540000,
isolation_level='read_uncommitted',
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
)

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

NameTypeDescriptionDefault
topicOptional[str]Kafka topic that the consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefixNone
decoderUnion[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. It also accepts custom decoder function.'json'
executorUnion[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]Type of executor to choose for consuming tasks. Available options are "SequentialExecutor" and "DynamicTaskExecutor". The default option is "SequentialExecutor" which will execute the consuming tasks sequentially. If the consuming tasks have high latency it is recommended to use "DynamicTaskExecutor" which will wrap the consuming functions into tasks and run them on an asyncio loop in the background. This comes with a cost of increased overhead so use it only in cases when your consume functions have high latency such as database queries or some other type of networking.None
prefixstrPrefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: "on_". If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError'on_'
brokersUnion[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka.None
descriptionOptional[str]Optional description of the consuming function async docs.If not provided, consuming function doc attr will be used.None
bootstrap_serversa host[:port] string (or list ofhost[:port] strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. If no servers arespecified, will default to localhost:9092.'localhost'
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:~.consumer.group_coordinator.GroupCoordinatorfor logging with respect to consumer group administration. Default:aiokafka-{version}'aiokafka-0.8.1'
group_idname of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: NoneNone
key_deserializerAny callable that takes araw message key and returns a deserialized key.None
value_deserializerAny callable that takes araw message value and returns a deserialized value.None
fetch_min_bytesMinimum amount of data the server shouldreturn for a fetch request, otherwise wait up tofetch_max_wait_ms for more data to accumulate. Default: 1.1
fetch_max_bytesThe maximum amount of data the server shouldreturn for a fetch request. This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).52428800
fetch_max_wait_msThe maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500.500
max_partition_fetch_bytesThe maximum amount of dataper-partition the server will return. The maximum total memoryused for a request = #partitions * max_partition_fetch_bytes.This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. Default: 1048576.1048576
max_poll_recordsThe maximum number of records returned in asingle call to :meth:.getmany. Defaults None, no limit.None
request_timeout_msClient request timeout in milliseconds.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
auto_offset_resetA policy for resetting offsets on:exc:.OffsetOutOfRangeError errors: earliest will move to the oldestavailable message, latest will move to the most recent, andnone will raise an exception so you can handle this case.Default: latest.'latest'
enable_auto_commitIf true the consumer's offset will beperiodically committed in the background. Default: True.True
auto_commit_interval_msmilliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000.5000
check_crcsAutomatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: TrueTrue
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
partition_assignment_strategyList of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. Default: [:class:.RoundRobinPartitionAssignor](<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)
max_poll_interval_msMaximum allowed time between calls toconsume messages (e.g., :meth:.getmany). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See KIP-62_ for moreinformation. Default 300000300000
rebalance_timeout_msThe maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to max.poll.interval.ms configuration, but as aiokafka will rejoin the group in the background, we decouple this setting to allow finer tuning by users that use :class:.ConsumerRebalanceListener to delay rebalancing. Defaults to session_timeout_msNone
session_timeout_msClient group session and failure detection timeout. The consumer sends periodic heartbeats (heartbeat.interval.ms) to indicate its liveness to the broker. If no heartbeats are received by the broker for a group member within the session timeout, the broker will remove the consumer from the group and trigger a rebalance. The allowed range is configured with the broker configuration properties group.min.session.timeout.ms and group.max.session.timeout.ms. Default: 1000010000
heartbeat_interval_msThe expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than session_timeout_ms, but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. Default: 30003000
consumer_timeout_msmaximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200200
api_versionspecify which kafka API version to use.:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For more information see:ref:ssl_auth. Default: None.None
exclude_internal_topicsWhether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: TrueTrue
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying None willdisable idle checks. Default: 540000 (9 minutes).540000
isolation_levelControls how to read messages writtentransactionally.If set to read_committed, :meth:.getmany will only returntransactional messages which have been committed.If set to read_uncommitted (the default), :meth:.getmany willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, inread_committed mode, :meth:.getmany will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, read_committed consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in read_committed the seek_to_end method willreturn the LSO. See method docs below. Default: read_uncommitted'read_uncommitted'
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are:PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: NoneNone
sasl_oauth_token_providerOAuthBearer token provider instance. (See :mod:kafka.oauth.abstract).Default: NoneNone

Returns:

TypeDescription
Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]]: A function returning the same function

create_docs

View source
create_docs(
self
)

Create the asyncapi documentation based on the configured consumers and producers.

This function exports the asyncapi specification based on the configured consumers and producers in the FastKafka instance. It generates the asyncapi documentation by extracting the topics and callbacks from the consumers and producers.

Note: The asyncapi documentation is saved to the location specified by the _asyncapi_path attribute of the FastKafka instance.

create_mocks

View source
create_mocks(
self
)

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

fastapi_lifespan

View source
fastapi_lifespan(
self, kafka_broker_name
)

Method for managing the lifespan of a FastAPI application with a specific Kafka broker.

Parameters:

NameTypeDescriptionDefault
kafka_broker_namestrThe name of the Kafka broker to start FastKafkarequired

Returns:

TypeDescription
Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]Lifespan function to use for initializing FastAPI

get_topics

View source
get_topics(
self
)

Get all topics for both producing and consuming.

Returns:

TypeDescription
Iterable[str]A set of topics for both producing and consuming.

is_started

View source
@property
is_started(
self
)

Property indicating whether the FastKafka object is started.

The is_started property indicates if the FastKafka object is currently in a started state. This implies that all background tasks, producers, and consumers have been initiated, and the object is successfully connected to the Kafka broker.

Returns:

TypeDescription
boolTrue if the object is started, False otherwise.

produces

View source
produces(
self,
topic=None,
encoder='json',
prefix='to_',
brokers=None,
description=None,
loop=None,
bootstrap_servers='localhost',
client_id=None,
metadata_max_age_ms=300000,
request_timeout_ms=40000,
api_version='auto',
acks=<object object at 0x7f21fc189d70>,
key_serializer=None,
value_serializer=None,
compression_type=None,
max_batch_size=16384,
partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>,
max_request_size=1048576,
linger_ms=0,
send_backoff_ms=100,
retry_backoff_ms=100,
security_protocol='PLAINTEXT',
ssl_context=None,
connections_max_idle_ms=540000,
enable_idempotence=False,
transactional_id=None,
transaction_timeout_ms=60000,
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
)

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specification and documentation.

Parameters:

NameTypeDescriptionDefault
topicOptional[str]Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix.None
encoderUnion[str, Callable[[pydantic.main.BaseModel], bytes]]Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. It also accepts custom encoder function.'json'
prefixstrPrefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: "to_". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError'to_'
brokersUnion[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka.None
descriptionOptional[str]Optional description of the producing function async docs.If not provided, producing function doc attr will be used.None
bootstrap_serversa host[:port] string or list ofhost[:port] strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list. It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to localhost:9092.'localhost'
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: aiokafka-producer-# (appended with a unique numberper instance)None
key_serializerused to convert user-supplied keys to bytesIf not :data:None, called as f(key), should return:class:bytes.Default: :data:None.None
value_serializerused to convert user-supplied messagevalues to :class:bytes. If not :data:None, called asf(value), should return :class:bytes.Default: :data:None.None
acksone of 0, 1, all. The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common: 0: Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1. 1: The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* all: The broker leader will wait for the full set of in-sync replicas to acknowledge the record. This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to acks=1. If enable_idempotence is:data:True defaults to acks=all<object object at 0x7f21fc189d70>
compression_typeThe compression type for all data generated bythe producer. Valid values are gzip, snappy, lz4, zstdor :data:None.Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). Default: :data:None.None
max_batch_sizeMaximum size of buffered data per partition.After this amount :meth:send coroutine will block until batch isdrained.Default: 1638416384
linger_msThe producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than linger_ms, producer will wait linger_ms - process_time.Default: 0 (i.e. no delay).0
partitionerCallable used to determine which partitioneach message is assigned to. Called (after key serialization):partitioner(key_bytes, all_partitions, available_partitions).The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:None, the message is delivered to a random partition(filtered to partitions with available leaders only, if possible).<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>
max_request_sizeThe maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576.1048576
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
request_timeout_msProduce request timeout in milliseconds.As it's sent as part of:class:~kafka.protocol.produce.ProduceRequest (it's a blockingcall), maximum waiting time can be up to 2 *request_timeout_ms.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
api_versionspecify which kafka API version to use.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For moreinformation see :ref:ssl_auth.Default: :data:NoneNone
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying :data:None willdisable idle checks. Default: 540000 (9 minutes).540000
enable_idempotenceWhen set to :data:True, the producer willensure that exactly one copy of each message is written in thestream. If :data:False, producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence acks to set to all. If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:ValueError will be thrown.New in version 0.5.0.False
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid valuesare: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: :data:NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: :data:NoneNone

Returns:

TypeDescription
Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]: A function returning the same function

Exceptions:

TypeDescription
ValueErrorwhen needed

run_in_background

View source
run_in_background(
self
)

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

TypeDescription
Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]A decorator function that takes a background task as an input and stores it to be run in the backround.

set_kafka_broker

View source
set_kafka_broker(
self, kafka_broker_name
)

Sets the Kafka broker to start FastKafka with

Parameters:

NameTypeDescriptionDefault
kafka_broker_namestrThe name of the Kafka broker to start FastKafkarequired

Exceptions:

TypeDescription
ValueErrorIf the provided kafka_broker_name is not found in dictionary of kafka_brokers
+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/testing/ApacheKafkaBroker/index.html b/docs/next/api/fastkafka/testing/ApacheKafkaBroker/index.html new file mode 100644 index 0000000..e6719ff --- /dev/null +++ b/docs/next/api/fastkafka/testing/ApacheKafkaBroker/index.html @@ -0,0 +1,34 @@ + + + + + +ApacheKafkaBroker | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

ApacheKafkaBroker

fastkafka.testing.ApacheKafkaBroker

View source

ApacheKafkaBroker class, used for running unique kafka brokers in tests to prevent topic clashing.

init

View source
__init__(
self,
topics=[],
retries=3,
apply_nest_asyncio=False,
zookeeper_port=2181,
listener_port=9092,
)

Initialises the ApacheKafkaBroker object

Parameters:

NameTypeDescriptionDefault
topicsIterable[str]List of topics to create after sucessfull Kafka broker startup[]
retriesintNumber of retries to create kafka and zookeeper services using random3
apply_nest_asyncioboolset to True if running in notebookFalse
zookeeper_portintPort for clients (Kafka brokes) to connect2181
listener_portintPort on which the clients (producers and consumers) can connect9092

get_service_config_string

View source
get_service_config_string(
self, service, data_dir
)

Gets the configuration string for a service.

Parameters:

NameTypeDescriptionDefault
servicestrName of the service ("kafka" or "zookeeper").required
data_dirPathPath to the directory where the service will save data.required

Returns:

TypeDescription
strThe service configuration string.

is_started

View source
@property
is_started(
self
)

Property indicating whether the ApacheKafkaBroker object is started.

The is_started property indicates if the ApacheKafkaBroker object is currently +in a started state. This implies that Zookeeper and Kafka broker processes have +sucesfully started and are ready for handling events.

Returns:

TypeDescription
boolTrue if the object is started, False otherwise.

start

View source
start(
self
)

Starts a local Kafka broker and ZooKeeper instance synchronously.

Returns:

TypeDescription
strThe Kafka broker bootstrap server address in string format: host:port.

stop

View source
stop(
self
)

Stops a local kafka broker and zookeeper instance synchronously

+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/testing/LocalRedpandaBroker/index.html b/docs/next/api/fastkafka/testing/LocalRedpandaBroker/index.html new file mode 100644 index 0000000..e154e73 --- /dev/null +++ b/docs/next/api/fastkafka/testing/LocalRedpandaBroker/index.html @@ -0,0 +1,34 @@ + + + + + +LocalRedpandaBroker | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

LocalRedpandaBroker

fastkafka.testing.LocalRedpandaBroker

View source

LocalRedpandaBroker class, used for running unique redpanda brokers in tests to prevent topic clashing.

init

View source
__init__(
self,
topics=[],
retries=3,
apply_nest_asyncio=False,
listener_port=9092,
tag='v23.1.2',
seastar_core=1,
memory='1G',
mode='dev-container',
default_log_level='debug',
kwargs,
)

Initialises the LocalRedpandaBroker object

Parameters:

NameTypeDescriptionDefault
topicsIterable[str]List of topics to create after sucessfull redpanda broker startup[]
retriesintNumber of retries to create redpanda service3
apply_nest_asyncioboolset to True if running in notebookFalse
listener_portintPort on which the clients (producers and consumers) can connect9092
tagstrTag of Redpanda image to use to start container'v23.1.2'
seastar_coreintCore(s) to use byt Seastar (the framework Redpanda uses under the hood)1
memorystrThe amount of memory to make available to Redpanda'1G'
modestrMode to use to load configuration properties in container'dev-container'
default_log_levelstrLog levels to use for Redpanda'debug'

get_service_config_string

View source
get_service_config_string(
self, service, data_dir
)

Generates a configuration for a service

Parameters:

NameTypeDescriptionDefault
data_dirPathPath to the directory where the zookeepeer instance will save datarequired
servicestr"redpanda", defines which service to get config string forrequired

is_started

View source
@property
is_started(
self
)

Property indicating whether the LocalRedpandaBroker object is started.

The is_started property indicates if the LocalRedpandaBroker object is currently +in a started state. This implies that Redpanda docker container has sucesfully +started and is ready for handling events.

Returns:

TypeDescription
boolTrue if the object is started, False otherwise.

start

View source
start(
self
)

Starts a local redpanda broker instance synchronously

Returns:

TypeDescription
strRedpanda broker bootstrap server address in string format: add:port

stop

View source
stop(
self
)

Stops a local redpanda broker instance synchronously

+ + + + \ No newline at end of file diff --git a/docs/next/api/fastkafka/testing/Tester/index.html b/docs/next/api/fastkafka/testing/Tester/index.html new file mode 100644 index 0000000..ea8eb91 --- /dev/null +++ b/docs/next/api/fastkafka/testing/Tester/index.html @@ -0,0 +1,39 @@ + + + + + +Tester | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Tester

fastkafka.testing.Tester

View source

init

View source
__init__(
self, app, use_in_memory_broker=True
)

Mirror-like object for testing a FastKafka application

Can be used as context manager

Parameters:

NameTypeDescriptionDefault
appUnion[fastkafka.FastKafka, List[fastkafka.FastKafka]]The FastKafka application to be tested.required
use_in_memory_brokerboolWhether to use an in-memory broker for testing or not.True

benchmark

View source
benchmark(
self, interval=1, sliding_window_size=None
)

Decorator to benchmark produces/consumes functions

Parameters:

NameTypeDescriptionDefault
intervalUnion[int, datetime.timedelta]Period to use to calculate throughput. If value is of type int,then it will be used as seconds. If value is of type timedelta,then it will be used as it is. default: 1 - one second1
sliding_window_sizeOptional[int]The size of the sliding window to use to calculateaverage throughput. default: None - By default average throughput isnot calculatedNone

consumes

View source
consumes(
self,
topic=None,
decoder='json',
executor=None,
brokers=None,
prefix='on_',
description=None,
loop=None,
bootstrap_servers='localhost',
client_id='aiokafka-0.8.1',
group_id=None,
key_deserializer=None,
value_deserializer=None,
fetch_max_wait_ms=500,
fetch_max_bytes=52428800,
fetch_min_bytes=1,
max_partition_fetch_bytes=1048576,
request_timeout_ms=40000,
retry_backoff_ms=100,
auto_offset_reset='latest',
enable_auto_commit=True,
auto_commit_interval_ms=5000,
check_crcs=True,
metadata_max_age_ms=300000,
partition_assignment_strategy=(<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,),
max_poll_interval_ms=300000,
rebalance_timeout_ms=None,
session_timeout_ms=10000,
heartbeat_interval_ms=3000,
consumer_timeout_ms=200,
max_poll_records=None,
ssl_context=None,
security_protocol='PLAINTEXT',
api_version='auto',
exclude_internal_topics=True,
connections_max_idle_ms=540000,
isolation_level='read_uncommitted',
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
)

Decorator registering the callback called when a message is received in a topic.

This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.

Parameters:

NameTypeDescriptionDefault
topicOptional[str]Kafka topic that the consumer will subscribe to and execute thedecorated function when it receives a message from the topic,default: None. If the topic is not specified, topic name will beinferred from the decorated function name by stripping the defined prefixNone
decoderUnion[str, Callable[[bytes, Type[pydantic.main.BaseModel]], Any]]Decoder to use to decode messages consumed from the topic,default: json - By default, it uses json decoder to decodebytes to json string and then it creates instance of pydanticBaseModel. It also accepts custom decoder function.'json'
executorUnion[str, fastkafka._components.task_streaming.StreamExecutor, NoneType]Type of executor to choose for consuming tasks. Avaliable optionsare "SequentialExecutor" and "DynamicTaskExecutor". The default option is"SequentialExecutor" which will execute the consuming tasks sequentially.If the consuming tasks have high latency it is recommended to use"DynamicTaskExecutor" which will wrap the consuming functions into tasksand run them in on asyncio loop in background. This comes with a cost ofincreased overhead so use it only in cases when your consume functions havehigh latency such as database queries or some other type of networking.None
prefixstrPrefix stripped from the decorated function to define a topic nameif the topic argument is not passed, default: "on_". If the decoratedfunction name is not prefixed with the defined prefix and topic argumentis not passed, then this method will throw ValueError'on_'
brokersUnion[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka.None
descriptionOptional[str]Optional description of the consuming function async docs.If not provided, consuming function doc attr will be used.None
bootstrap_serversa host[:port] string (or list ofhost[:port] strings) that the consumer should contact to bootstrapinitial cluster metadata.This does not have to be the full node list.It just needs to have at least one broker that will respond to aMetadata API Request. Default port is 9092. If no servers arespecified, will default to localhost:9092.'localhost'
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client. Alsosubmitted to :class:~.consumer.group_coordinator.GroupCoordinatorfor logging with respect to consumer group administration. Default:aiokafka-{version}'aiokafka-0.8.1'
group_idname of the consumer group to join for dynamicpartition assignment (if enabled), and to use for fetching andcommitting offsets. If None, auto-partition assignment (viagroup coordinator) and offset commits are disabled.Default: NoneNone
key_deserializerAny callable that takes araw message key and returns a deserialized key.None
value_deserializerAny callable that takes araw message value and returns a deserialized value.None
fetch_min_bytesMinimum amount of data the server shouldreturn for a fetch request, otherwise wait up tofetch_max_wait_ms for more data to accumulate. Default: 1.1
fetch_max_bytesThe maximum amount of data the server shouldreturn for a fetch request. This is not an absolute maximum, ifthe first message in the first non-empty partition of the fetchis larger than this value, the message will still be returnedto ensure that the consumer can make progress. NOTE: consumerperforms fetches to multiple brokers in parallel so memoryusage will depend on the number of brokers containingpartitions for the topic.Supported Kafka version >= 0.10.1.0. Default: 52428800 (50 Mb).52428800
fetch_max_wait_msThe maximum amount of time in millisecondsthe server will block before answering the fetch request ifthere isn't sufficient data to immediately satisfy therequirement given by fetch_min_bytes. Default: 500.500
max_partition_fetch_bytesThe maximum amount of dataper-partition the server will return. The maximum total memoryused for a request = #partitions * max_partition_fetch_bytes.This size must be at least as large as the maximum message sizethe server allows or else it is possible for the producer tosend messages larger than the consumer can fetch. If thathappens, the consumer can get stuck trying to fetch a largemessage on a certain partition. Default: 1048576.1048576
max_poll_recordsThe maximum number of records returned in asingle call to :meth:.getmany. Defaults None, no limit.None
request_timeout_msClient request timeout in milliseconds.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
auto_offset_resetA policy for resetting offsets on:exc:.OffsetOutOfRangeError errors: earliest will move to the oldestavailable message, latest will move to the most recent, andnone will raise an exception so you can handle this case.Default: latest.'latest'
enable_auto_commitIf true the consumer's offset will beperiodically committed in the background. Default: True.True
auto_commit_interval_msmilliseconds between automaticoffset commits, if enable_auto_commit is True. Default: 5000.5000
check_crcsAutomatically check the CRC32 of the recordsconsumed. This ensures no on-the-wire or on-disk corruption tothe messages occurred. This check adds some overhead, so it maybe disabled in cases seeking extreme performance. Default: TrueTrue
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
partition_assignment_strategyList of objects to use todistribute partition ownership amongst consumer instances whengroup management is used. This preference is implicit in the orderof the strategies in the list. When assignment strategy changes:to support a change to the assignment strategy, new versions mustenable support both for the old assignment strategy and the newone. The coordinator will choose the old assignment strategy untilall members have been updated. Then it will choose the newstrategy. Default: [:class:.RoundRobinPartitionAssignor](<class 'kafka.coordinator.assignors.roundrobin.RoundRobinPartitionAssignor'>,)
max_poll_interval_msMaximum allowed time between calls toconsume messages (e.g., :meth:.getmany). If this intervalis exceeded the consumer is considered failed and the group willrebalance in order to reassign the partitions to another consumergroup member. If API methods block waiting for messages, that timedoes not count against this timeout. See KIP-62_ for moreinformation. Default 300000300000
rebalance_timeout_msThe maximum time server will wait for thisconsumer to rejoin the group in a case of rebalance. In Java clientthis behaviour is bound to max.poll.interval.ms configuration,but as aiokafka will rejoin the group in the background, wedecouple this setting to allow finer tuning by users that use:class:.ConsumerRebalanceListener to delay rebalacing. Defaultsto session_timeout_msNone
session_timeout_msClient group session and failure detectiontimeout. The consumer sends periodic heartbeats(heartbeat.interval.ms) to indicate its liveness to the broker.If no hearts are received by the broker for a group member withinthe session timeout, the broker will remove the consumer from thegroup and trigger a rebalance. The allowed range is configured withthe broker configuration propertiesgroup.min.session.timeout.ms and group.max.session.timeout.ms.Default: 1000010000
heartbeat_interval_msThe expected time in millisecondsbetween heartbeats to the consumer coordinator when usingKafka's group management feature. Heartbeats are used to ensurethat the consumer's session stays active and to facilitaterebalancing when new consumers join or leave the group. Thevalue must be set lower than session_timeout_ms, but typicallyshould be set no higher than 1/3 of that value. It can beadjusted even lower to control the expected time for normalrebalances. Default: 30003000
consumer_timeout_msmaximum wait timeout for background fetchingroutine. Mostly defines how fast the system will see rebalance andrequest new data for new partitions. Default: 200200
api_versionspecify which kafka API version to use.:class:AIOKafkaConsumer supports Kafka API versions >=0.9 only.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For more information see:ref:ssl_auth. Default: None.None
exclude_internal_topicsWhether records from internal topics(such as offsets) should be exposed to the consumer. If set to Truethe only way to receive records from an internal topic issubscribing to it. Requires 0.10+ Default: TrueTrue
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying None willdisable idle checks. Default: 540000 (9 minutes).540000
isolation_levelControls how to read messages writtentransactionally.If set to read_committed, :meth:.getmany will only returntransactional messages which have been committed.If set to read_uncommitted (the default), :meth:.getmany willreturn all messages, even transactional messages which have beenaborted.Non-transactional messages will be returned unconditionally ineither mode.Messages will always be returned in offset order. Hence, inread_committed mode, :meth:.getmany will only returnmessages up to the last stable offset (LSO), which is the one lessthan the offset of the first open transaction. In particular anymessages appearing after messages belonging to ongoing transactionswill be withheld until the relevant transaction has been completed.As a result, read_committed consumers will not be able to read upto the high watermark when there are in flight transactions.Further, when in read_committed the seek_to_end method willreturn the LSO. See method docs below. Default: read_uncommitted'read_uncommitted'
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid values are:PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: NoneNone
sasl_oauth_token_providerOAuthBearer token provider instance. (See :mod:kafka.oauth.abstract).Default: NoneNone

Returns:

TypeDescription
Callable[[Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]], Union[Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], Awaitable[None]], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel]], None], Callable[[Union[List[pydantic.main.BaseModel], pydantic.main.BaseModel], Union[List[fastkafka.EventMetadata], fastkafka.EventMetadata]], None]]]: A function returning the same function

create_docs

View source
create_docs(
self
)

Create the asyncapi documentation based on the configured consumers and producers.

This function exports the asyncapi specification based on the configured consumers +and producers in the FastKafka instance. It generates the asyncapi documentation by +extracting the topics and callbacks from the consumers and producers.

Note: +The asyncapi documentation is saved to the location specified by the _asyncapi_path +attribute of the FastKafka instance.

create_mocks

View source
create_mocks(
self
)

Creates self.mocks as a named tuple mapping a new function obtained by calling the original functions and a mock

fastapi_lifespan

View source
fastapi_lifespan(
self, kafka_broker_name
)

Method for managing the lifespan of a FastAPI application with a specific Kafka broker.

Parameters:

NameTypeDescriptionDefault
kafka_broker_namestrThe name of the Kafka broker to start FastKafkarequired

Returns:

TypeDescription
Callable[[ForwardRef('FastAPI')], AsyncIterator[None]]Lifespan function to use for initializing FastAPI

get_topics

View source
get_topics(
self
)

Get all topics for both producing and consuming.

Returns:

TypeDescription
Iterable[str]A set of topics for both producing and consuming.

is_started

View source
@property
is_started(
self
)

Property indicating whether the FastKafka object is started.

The is_started property indicates if the FastKafka object is currently +in a started state. This implies that all background tasks, producers, +and consumers have been initiated, and the object is successfully connected +to the Kafka broker.

Returns:

TypeDescription
boolTrue if the object is started, False otherwise.

produces

View source
produces(
self,
topic=None,
encoder='json',
prefix='to_',
brokers=None,
description=None,
loop=None,
bootstrap_servers='localhost',
client_id=None,
metadata_max_age_ms=300000,
request_timeout_ms=40000,
api_version='auto',
acks=<object object at 0x7f21fc189d70>,
key_serializer=None,
value_serializer=None,
compression_type=None,
max_batch_size=16384,
partitioner=<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>,
max_request_size=1048576,
linger_ms=0,
send_backoff_ms=100,
retry_backoff_ms=100,
security_protocol='PLAINTEXT',
ssl_context=None,
connections_max_idle_ms=540000,
enable_idempotence=False,
transactional_id=None,
transaction_timeout_ms=60000,
sasl_mechanism='PLAIN',
sasl_plain_password=None,
sasl_plain_username=None,
sasl_kerberos_service_name='kafka',
sasl_kerberos_domain_name=None,
sasl_oauth_token_provider=None,
)

Decorator registering the callback called when delivery report for a produced message is received

This function decorator is also responsible for registering topics for AsyncAPI specificiation and documentation.

Parameters:

NameTypeDescriptionDefault
topicOptional[str]Kafka topic that the producer will send returned values fromthe decorated function to, default: None- If the topic is notspecified, topic name will be inferred from the decorated functionname by stripping the defined prefix.None
encoderUnion[str, Callable[[pydantic.main.BaseModel], bytes]]Encoder to use to encode messages before sending it to topic,default: json - By default, it uses json encoder to convertpydantic basemodel to json string and then encodes the string to bytesusing 'utf-8' encoding. It also accepts custom encoder function.'json'
prefixstrPrefix stripped from the decorated function to define a topicname if the topic argument is not passed, default: "to_". If thedecorated function name is not prefixed with the defined prefixand topic argument is not passed, then this method will throw ValueError'to_'
brokersUnion[Dict[str, Any], fastkafka._components.asyncapi.KafkaBrokers, NoneType]Optional argument specifying multiple broker clusters for consumingmessages from different Kafka clusters in FastKafka.None
descriptionOptional[str]Optional description of the producing function async docs.If not provided, producing function doc attr will be used.None
bootstrap_serversa host[:port] string or list ofhost[:port] strings that the producer should contact tobootstrap initial cluster metadata. This does not have to be thefull node list. It just needs to have at least one broker that willrespond to a Metadata API Request. Default port is 9092. If noservers are specified, will default to localhost:9092.'localhost'
client_ida name for this client. This string is passed ineach request to servers and can be used to identify specificserver-side log entries that correspond to this client.Default: aiokafka-producer-# (appended with a unique numberper instance)None
key_serializerused to convert user-supplied keys to bytesIf not :data:None, called as f(key), should return:class:bytes.Default: :data:None.None
value_serializerused to convert user-supplied messagevalues to :class:bytes. If not :data:None, called asf(value), should return :class:bytes.Default: :data:None.None
acksone of 0, 1, all. The number of acknowledgmentsthe producer requires the leader to have received before considering arequest complete. This controls the durability of records that aresent. The following settings are common: 0: Producer will not wait for any acknowledgment from the server at all. The message will immediately be added to the socket buffer and considered sent. No guarantee can be made that the server has received the record in this case, and the retries configuration will not take effect (as the client won't generally know of any failures). The offset given back for each record will always be set to -1. 1: The broker leader will write the record to its local log but will respond without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after acknowledging the record but before the followers have replicated it then the record will be lost.* all: The broker leader will wait for the full set of in-sync replicas to acknowledge the record. This guarantees that the record will not be lost as long as at least one in-sync replica remains alive. This is the strongest available guarantee.If unset, defaults to acks=1. If enable_idempotence is:data:True defaults to acks=all<object object at 0x7f21fc189d70>
compression_typeThe compression type for all data generated bythe producer. Valid values are gzip, snappy, lz4, zstdor :data:None.Compression is of full batches of data, so the efficacy of batchingwill also impact the compression ratio (more batching means bettercompression). Default: :data:None.None
max_batch_sizeMaximum size of buffered data per partition.After this amount :meth:send coroutine will block until batch isdrained.Default: 1638416384
linger_msThe producer groups together any records that arrivein between request transmissions into a single batched request.Normally this occurs only under load when records arrive fasterthan they can be sent out. However in some circumstances the clientmay want to reduce the number of requests even under moderate load.This setting accomplishes this by adding a small amount ofartificial delay; that is, if first request is processed faster,than linger_ms, producer will wait linger_ms - process_time.Default: 0 (i.e. no delay).0
partitionerCallable used to determine which partitioneach message is assigned to. Called (after key serialization):partitioner(key_bytes, all_partitions, available_partitions).The default partitioner implementation hashes each non-None keyusing the same murmur2 algorithm as the Java client so thatmessages with the same key are assigned to the same partition.When a key is :data:None, the message is delivered to a random partition(filtered to partitions with available leaders only, if possible).<kafka.partitioner.default.DefaultPartitioner object at 0x7f21fa7a9150>
max_request_sizeThe maximum size of a request. This is alsoeffectively a cap on the maximum record size. Note that the serverhas its own cap on record size which may be different from this.This setting will limit the number of record batches the producerwill send in a single request to avoid sending huge requests.Default: 1048576.1048576
metadata_max_age_msThe period of time in milliseconds afterwhich we force a refresh of metadata even if we haven't seen anypartition leadership changes to proactively discover any newbrokers or partitions. Default: 300000300000
request_timeout_msProduce request timeout in milliseconds.As it's sent as part of:class:~kafka.protocol.produce.ProduceRequest (it's a blockingcall), maximum waiting time can be up to 2 *request_timeout_ms.Default: 40000.40000
retry_backoff_msMilliseconds to backoff when retrying onerrors. Default: 100.100
api_versionspecify which kafka API version to use.If set to auto, will attempt to infer the broker version byprobing various APIs. Default: auto'auto'
security_protocolProtocol used to communicate with brokers.Valid values are: PLAINTEXT, SSL, SASL_PLAINTEXT,SASL_SSL. Default: PLAINTEXT.'PLAINTEXT'
ssl_contextpre-configured :class:~ssl.SSLContextfor wrapping socket connections. Directly passed into asyncio's:meth:~asyncio.loop.create_connection. For moreinformation see :ref:ssl_auth.Default: :data:NoneNone
connections_max_idle_msClose idle connections after the numberof milliseconds specified by this config. Specifying :data:None willdisable idle checks. Default: 540000 (9 minutes).540000
enable_idempotenceWhen set to :data:True, the producer willensure that exactly one copy of each message is written in thestream. If :data:False, producer retries due to broker failures,etc., may write duplicates of the retried message in the stream.Note that enabling idempotence requires acks to be set to all. If it is notexplicitly set by the user it will be chosen. If incompatiblevalues are set, a :exc:ValueError will be thrown.New in version 0.5.0.False
sasl_mechanismAuthentication mechanism when security_protocolis configured for SASL_PLAINTEXT or SASL_SSL. Valid valuesare: PLAIN, GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512,OAUTHBEARER.Default: PLAIN'PLAIN'
sasl_plain_usernameusername for SASL PLAIN authentication.Default: :data:NoneNone
sasl_plain_passwordpassword for SASL PLAIN authentication.Default: :data:NoneNone

Returns:

TypeDescription
Callable[[Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]], Union[Callable[..., Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]], Callable[..., Awaitable[Union[pydantic.main.BaseModel, fastkafka.KafkaEvent[pydantic.main.BaseModel], List[pydantic.main.BaseModel], fastkafka.KafkaEvent[List[pydantic.main.BaseModel]]]]]]]: A function returning the same function

Exceptions:

TypeDescription
ValueErrorwhen needed

run_in_background

View source
run_in_background(
self
)

Decorator to schedule a task to be run in the background.

This decorator is used to schedule a task to be run in the background when the app's _on_startup event is triggered.

Returns:

TypeDescription
Callable[[Callable[..., Coroutine[Any, Any, Any]]], Callable[..., Coroutine[Any, Any, Any]]]A decorator function that takes a background task as an input and stores it to be run in the background.

set_kafka_broker

View source
set_kafka_broker(
self, kafka_broker_name
)

Sets the Kafka broker to start FastKafka with

Parameters:

NameTypeDescriptionDefault
kafka_broker_namestrThe name of the Kafka broker to start FastKafkarequired

Exceptions:

TypeDescription
ValueErrorIf the provided kafka_broker_name is not found in dictionary of kafka_brokers

using_external_broker

View source
using_external_broker(
self, bootstrap_servers_id=None
)

Tester context manager for using external broker

Parameters:

NameTypeDescriptionDefault
bootstrap_servers_idOptional[str]The bootstrap server of applications.None

Returns:

TypeDescription
AsyncGenerator[ForwardRef('Tester'), None]self or None

using_inmemory_broker

View source
using_inmemory_broker(
self, bootstrap_servers_id=None
)

Tester context manager for using in-memory broker

Parameters:

NameTypeDescriptionDefault
bootstrap_servers_idOptional[str]The bootstrap server of applications.None

Returns:

TypeDescription
AsyncGenerator[ForwardRef('Tester'), None]self or None
+ + + + \ No newline at end of file diff --git a/docs/next/cli/fastkafka/index.html b/docs/next/cli/fastkafka/index.html new file mode 100644 index 0000000..a1c097f --- /dev/null +++ b/docs/next/cli/fastkafka/index.html @@ -0,0 +1,32 @@ + + + + + +fastkafka | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

fastkafka

Usage:

$ fastkafka [OPTIONS] COMMAND [ARGS]...

Options:

  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.

Commands:

  • docs: Commands for managing FastKafka app...
  • run: Runs Fast Kafka API application
  • testing: Commands for managing FastKafka testing

fastkafka docs

Commands for managing FastKafka app documentation

Usage:

$ fastkafka docs [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • generate: Generates documentation for a FastKafka...
  • install_deps: Installs dependencies for FastKafka...
  • serve: Generates and serves documentation for a...

fastkafka docs generate

Generates documentation for a FastKafka application

Usage:

$ fastkafka docs generate [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created; default is current directory
  • --help: Show this message and exit.

fastkafka docs install_deps

Installs dependencies for FastKafka documentation generation

Usage:

$ fastkafka docs install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.

fastkafka docs serve

Generates and serves documentation for a FastKafka application

Usage:

$ fastkafka docs serve [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --root-path TEXT: root path under which documentation will be created; default is current directory
  • --bind TEXT: Some info [default: 127.0.0.1]
  • --port INTEGER: Some info [default: 8000]
  • --help: Show this message and exit.

fastkafka run

Runs Fast Kafka API application

Usage:

$ fastkafka run [OPTIONS] APP

Arguments:

  • APP: input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --num-workers INTEGER: Number of FastKafka instances to run, defaults to number of CPU cores. [default: 2]
  • --kafka-broker TEXT: kafka_broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [default: localhost]
  • --help: Show this message and exit.

fastkafka testing

Commands for managing FastKafka testing

Usage:

$ fastkafka testing [OPTIONS] COMMAND [ARGS]...

Options:

  • --help: Show this message and exit.

Commands:

  • install_deps: Installs dependencies for FastKafka app...

fastkafka testing install_deps

Installs dependencies for FastKafka app testing

Usage:

$ fastkafka testing install_deps [OPTIONS]

Options:

  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/next/cli/run_fastkafka_server_process/index.html b/docs/next/cli/run_fastkafka_server_process/index.html new file mode 100644 index 0000000..f1a273e --- /dev/null +++ b/docs/next/cli/run_fastkafka_server_process/index.html @@ -0,0 +1,32 @@ + + + + + +run_fastkafka_server_process | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

run_fastkafka_server_process

Usage:

$ run_fastkafka_server_process [OPTIONS] APP

Arguments:

  • APP: Input in the form of 'path:app', where path is the path to a python file and app is an object of type FastKafka. [required]

Options:

  • --kafka-broker TEXT: Kafka broker, one of the keys of the kafka_brokers dictionary passed in the constructor of FastKafka class. [required]
  • --install-completion: Install completion for the current shell.
  • --show-completion: Show completion for the current shell, to copy it or customize the installation.
  • --help: Show this message and exit.
+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_00_FastKafka_Demo/index.html b/docs/next/guides/Guide_00_FastKafka_Demo/index.html new file mode 100644 index 0000000..b88d7a8 --- /dev/null +++ b/docs/next/guides/Guide_00_FastKafka_Demo/index.html @@ -0,0 +1,122 @@ + + + + + +FastKafka tutorial | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

FastKafka tutorial

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

Install

FastKafka works on macOS, Linux, and most Unix-style operating systems. +You can install it with pip as usual:

pip install fastkafka
try:
import fastkafka
except:
! pip install fastkafka

Running in Colab

You can start this interactive tutorial in Google Colab by clicking the +button below:

Open In Colab

Writing server code

Here is an example python script using FastKafka that takes data from a +Kafka topic, makes a prediction using a predictive model, and outputs +the prediction to another Kafka topic.

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call downloads +the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +generating the documentation only and it is not being checked by the +actual server.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, +which specifies that this function should produce a message to the +“predictions" Kafka topic whenever it is called. The to_predictions +function takes a single integer argument species_class representing +one of three possible string values predicted by the model. It creates +a new IrisPrediction message using this value and then returns it. +The framework will call the IrisPrediction.json().encode("utf-8") +function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Testing the service

The service can be tested using the +Tester +instances which internally starts Kafka broker and zookeeper.

Before running tests, we have to install Java runtime and Apache Kafka +locally. To simplify the process, we provide the following convenience +command:

fastkafka testing install_deps
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)

# Start Tester app and create local Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created an Iris classification model and encapsulated it into our +fastkafka application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

Running the service

The service can be started using the built-in fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

To run the service, you will need a running Kafka broker on localhost as +specified in the kafka_brokers parameter above. We can start the Kafka +broker locally using the +ApacheKafkaBroker. +Notice that the same happens automatically in the +Tester +as shown above.

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): entering...
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): (<_UnixSelectorEventLoop running=True closed=False debug=False>) is already running!
[WARNING] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): calling nest_asyncio.apply()
[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: <class 'fastkafka.testing.ApacheKafkaBroker'>.start(): returning 127.0.0.1:9092
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.start(): exited.

'127.0.0.1:9092'

Then, we start the FastKafka service by running the following command in +the folder where the application.py file is located:

fastkafka run --num-workers=2 --kafka-broker localhost application:kafka_app

In the above command, we use --num-workers option to specify how many +workers to launch and we use --kafka-broker option to specify which +kafka broker configuration to use from earlier specified kafka_brokers

[1200656]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200656]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200656]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200656]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1200654]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1200656]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200656]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1200654]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1200654]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1200654]: [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1200654]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1200654]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200654]: [ERROR] aiokafka: Unable to update metadata from [0]
[1200656]: [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('192.168.112.2', 9092)
[1200656]: [ERROR] aiokafka: Unable to update metadata from [0]
^C
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200656]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: _aiokafka_consumer_loop(): Consumer loop shutting down, waiting for send_stream to drain...
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1200654]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200654...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1200656...

You need to interrupt the running of the cell above by selecting Runtime->Interrupt execution on the toolbar above.

Finally, we can stop the local Kafka Broker:

[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): entering...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1200193...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1200193 was already terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 1199820...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 1199820 was already terminated.
[INFO] fastkafka._testing.apache_kafka_broker: ApacheKafkaBroker.stop(): exited.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

When running in Colab, we need to update Node.js first:

We need to install all dependencies for the generator using the following command line:

fastkafka docs install_deps
[INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the following command:

fastkafka docs generate application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.

. This will generate the asyncapi folder in relative path where all +your documentation will be saved. You can check out the content of it +with:

ls -l asyncapi
total 8
drwxrwxr-x 4 kumaran kumaran 4096 Mar 21 10:09 docs
drwxrwxr-x 2 kumaran kumaran 4096 Mar 21 10:09 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In the spec folder you will find an asyncapi.yml file containing the async API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
[INFO] fastkafka._components.asyncapi: New async specifications generated at: '/work/fastkafka/nbs/guides/asyncapi/spec/asyncapi.yml'
[INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
[INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /work/fastkafka/nbs/guides/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
^C
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the documentation below:

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
bootstrap_servers="localhost:9092",
)

Kafka_servers

The following documentation snippet are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippet are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages defined as subclasses of BaseModel are documented as well:

![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_01_Intro/index.html b/docs/next/guides/Guide_01_Intro/index.html new file mode 100644 index 0000000..593ae53 --- /dev/null +++ b/docs/next/guides/Guide_01_Intro/index.html @@ -0,0 +1,51 @@ + + + + + +Intro | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Intro

This tutorial will show you how to use FastKafkaAPI, step by +step.

The goal of FastKafkaAPI is to simplify the use of Apache Kafka in +Python inspired by FastAPI look and feel.

In this Intro tutorial we’ll go through the basic requirements to run the demos presented in future steps.

Installing FastKafkaAPI

First step is to install FastKafkaAPI

$ pip install fastkafka

Preparing a Kafka broker

Next step is to prepare the Kafka environment, our consumers and +producers will need some channel of communication.

!!! info "Hey, your first info!"

If you already have an instance of Kafka running that you can connect to for demo purposes, feel free to skip this step. 

To go through the tutorial, we recommend that you use dockerized Kafka +brokers, if you have Docker and docker-compose installed the setup +should take you no time (if we exclude the container download times).

!!! warning "Listen! This is important."

To be able to setup this configuration you need to have Docker and docker-compose installed

See here for more info on <a href = \"https://docs.docker.com/\" target=\"_blank\">Docker</a> and <a href = \"https://docs.docker.com/compose/install/\" target=\"_blank\">docker compose</a>

To set up the recommended environment, first, create a new folder where you want to save your demo files (e.g. fastkafka_demo). Inside the new folder create a new YAML file named kafka_demo.yml and copy the following configuration into it:

version: "3"
services:
zookeeper:
image: wurstmeister/zookeeper
hostname: zookeeper
container_name: zookeeper
networks:
- fastkafka-network
ports:
- "2181:2181"
- "22:22"
- "2888:2888"
- "3888:3888"
kafka:
image: wurstmeister/kafka
container_name: kafka
ports:
- "9093:9093"
environment:
HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d' ' -f 2"
KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTER:PLAINTEXT,INSIDE:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: INTER://:9092,INSIDE://localhost:9093
KAFKA_LISTENERS: INTER://_{HOSTNAME_COMMAND}:9092,INSIDE://:9093
KAFKA_INTER_BROKER_LISTENER_NAME: INTER
KAFKA_CREATE_TOPICS: "hello:1:1"
volumes:
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
- zookeeper
healthcheck:
test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]
interval: 5s
timeout: 10s
retries: 5
networks:
- fastkafka-network
networks:
fastkafka-network:
name: "fastkafka-network"

This configuration will start a single instance of Zookeeper, single +instance of Kafka broker and create a ‘hello’ topic (quite enough for a +start). To start the configuration, run:

$ docker-compose -f kafka_demo.yml up -d --wait

This will start the necessary containers and wait till they report that +they are Healthy. After the command finishes, you are good to go to try +out the FastKafkaAPI capabilities! 🎊

Running the code

After installing FastKafkaAPI and initialising the Kafka broker you can +proceed to the ‘First Steps’ part of the tutorial. There, you will write +your first Kafka client and producer apps, run them, and interact with +them.

You are highly encouraged to follow along the tutorials not just by reading through them but by implementing the code examples in your own environment. This will not only help you remember the use cases better but also, hopefully, demonstrate to you the ease of use of this library.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_02_First_Steps/index.html b/docs/next/guides/Guide_02_First_Steps/index.html new file mode 100644 index 0000000..8df4a9e --- /dev/null +++ b/docs/next/guides/Guide_02_First_Steps/index.html @@ -0,0 +1,49 @@ + + + + + +First Steps | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

First Steps

Creating a simple Kafka consumer app

For our first demo we will create the simplest possible Kafka consumer +and run it using ‘fastkafka run’ command.

The consumer will:

  1. Connect to the Kafka Broker we setup in the Intro guide

  2. Listen to the hello topic

  3. Write any message received from the hello topic to stdout

To create the consumer, first, create a file named

hello_kafka_consumer.py and copy the following code to it:

from os import environ

from fastkafka import FastKafka
from pydantic import BaseModel, Field

kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
"localhost": {
"description": "local development kafka",
"url": kafka_server_url,
"port": kafka_server_port
}
}

class HelloKafkaMsg(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_app = FastKafka(
kafka_brokers=kafka_brokers
)

@kafka_app.consumes()
async def on_hello(msg: HelloKafkaMsg):
print(f"Got data, msg={msg.msg}", flush=True)

!!! info "Kafka configuration"

    This consumer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

!!! warning "Remember to flush"

Notice the **flush=True** option when using print in our consumer. This is because standard python print function doesn't flush by default. To be able to log the worker outputs in real time when using fastkafka run command, the outputs need to be flushed, they will be logged when stopping the worker otherwise.

To run this consumer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_consumer:kafka_app

After running the command, you should see something similar to the output below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._components.test_dependencies: But not exported to PATH, exporting...
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[878412]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': '127.0.0.1:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[878412]: [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello'})
[878412]: [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello'}
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[878412]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[878412]: [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello': 0}.
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 878412...
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[878412]: [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._server: terminate_asyncio_process(): Process 878412 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877951...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877951 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 877579...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 877579 terminated.

Now you can interact with your consumer, by sending the messages to the +subscribed ‘hello’ topic, don’t worry, we will cover this in the next +step of this guide.

Sending first message to your consumer

After we have created and run our first consumer, we should send a +message to it, to make sure it is working properly.

If you are using the Kafka setup as described in the Intro guide, you +can follow the steps listed here to send a message to the hello topic.

First, connect to your running kafka broker by running:

docker run -it kafka /bin/bash

Then, when connected to the container, run:

kafka-console-producer.sh --bootstrap-server=localhost:9092 --topic=hello

This will open an interactive connection to the hello topic; now you can write your messages to the topic and they will be consumed by our consumer.

In the shell, type:

{"msg":"hello"}

and press enter. This will send a hello message to the topic which will be read by our running consumer and output to stdout.

Check the output of your consumer (terminal where you ran the ‘fastkafka run’ command) and confirm that your consumer has read the Kafka message. You should see something like this:

Got data, msg=hello

Creating a hello Kafka producer

Consuming messages is only a part of this library's functionality; the other big part is producing messages. So, let’s create our first Kafka producer, which will send its greetings to our consumer periodically.

The producer will:

  1. Connect to the Kafka Broker we setup in the Intro guide
  2. Connect to the hello topic
  3. Periodically send a message to the hello world topic

To create the producer, first, create a file named

hello_kafka_producer.py and copy the following code to it:

from os import environ

import asyncio
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

kafka_server_url = environ["KAFKA_HOSTNAME"]
kafka_server_port = environ["KAFKA_PORT"]

kafka_brokers = {
"localhost": {
"description": "local development kafka",
"url": kafka_server_url,
"port": kafka_server_port
}
}

class HelloKafkaMsg(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_app = FastKafka(
kafka_brokers=kafka_brokers
)

logger = get_logger(__name__)

@kafka_app.produces()
async def to_hello(msg: HelloKafkaMsg) -> HelloKafkaMsg:
logger.info(f"Producing: {msg}")
return msg

@kafka_app.run_in_background()
async def hello_every_second():
while(True):
await to_hello(HelloKafkaMsg(msg="hello"))
await asyncio.sleep(1)

!!! info "Kafka configuration"

    This producer script uses KAFKA_HOSTNAME and KAFKA_PORT environment vars, so make sure that you have exported them into your environment before running the following command (e.g. in shell, for KAFKA_HOSTNAME, run: 'export KAFKA_HOSTNAME=kafka').

To run this producer, in your terminal, run:

fastkafka run --num-workers=1 --kafka-broker localhost hello_kafka_producer:kafka_app

After running the command, you should see something similar to the output below:

[INFO] fastkafka._components.test_dependencies: Java is already installed.
[INFO] fastkafka._components.test_dependencies: Kafka is installed.
[INFO] fastkafka._testing.apache_kafka_broker: Starting zookeeper...
[INFO] fastkafka._testing.apache_kafka_broker: Starting kafka...
[INFO] fastkafka._testing.apache_kafka_broker: Local Kafka broker up and running on 127.0.0.1:9092
[879272]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[879272]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[879272]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[879272]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [WARNING] aiokafka.cluster: Topic hello is not available during auto-create initialization
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 879272...
[879272]: [INFO] hello_kafka_producer: Producing: msg='hello'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[879272]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 879272 terminated.

[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878808...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878808 terminated.
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 878435...
[INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 878435 terminated.

Now, while the producer is running, it will send a HelloKafkaMsg every +second to the hello kafka topic. If your consumer is still running, you +should see the messages appear in its log.

Recap

In this guide we have:

  1. Created a simple Kafka consumer using FastKafka
  2. Sent a message to our consumer through Kafka
  3. Created a simple Kafka producer using FastKafka
+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_03_Authentication/index.html b/docs/next/guides/Guide_03_Authentication/index.html new file mode 100644 index 0000000..ec2b9cd --- /dev/null +++ b/docs/next/guides/Guide_03_Authentication/index.html @@ -0,0 +1,37 @@ + + + + + +Authentication | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Authentication

TLS Authentication

sasl_mechanism (str) – Authentication mechanism when security_protocol +is configured for SASL_PLAINTEXT or SASL_SSL. Valid values are: PLAIN, +GSSAPI, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER. Default: PLAIN

sasl_plain_username (str) – username for SASL PLAIN authentication. +Default: None

sasl_plain_password (str) – password for SASL PLAIN authentication. +Default: None

sasl_oauth_token_provider (AbstractTokenProvider) – OAuthBearer token +provider instance. (See kafka.oauth.abstract). Default: None

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_04_Github_Actions_Workflow/index.html b/docs/next/guides/Guide_04_Github_Actions_Workflow/index.html new file mode 100644 index 0000000..fbe691b --- /dev/null +++ b/docs/next/guides/Guide_04_Github_Actions_Workflow/index.html @@ -0,0 +1,42 @@ + + + + + +Deploy FastKafka docs to GitHub Pages | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Deploy FastKafka docs to GitHub Pages

Getting started

Add your workflow file .github/workflows/fastkafka_docs_deploy.yml and +push it to your remote default branch.

Here is an example workflow:

name: Deploy FastKafka Generated Documentation to GitHub Pages

on:
push:
branches: [ "main", "master" ]
workflow_dispatch:

jobs:
deploy:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

Options

Set app location

Input in the form of path:app, where path is the path to a Python +file and app is an object of type +FastKafka:

- name: Deploy
uses: airtai/workflows/fastkafka-ghp@main
with:
app: "test_fastkafka.application:kafka_app"

In the above example, +FastKafka +app is named as kafka_app and it is available in the application +submodule of the test_fastkafka module.

Example Repository

A FastKafka-based library that uses the above-mentioned workflow actions to publish FastKafka docs to GitHub Pages can be found here.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_05_Lifespan_Handler/index.html b/docs/next/guides/Guide_05_Lifespan_Handler/index.html new file mode 100644 index 0000000..3128c70 --- /dev/null +++ b/docs/next/guides/Guide_05_Lifespan_Handler/index.html @@ -0,0 +1,75 @@ + + + + + +Lifespan Events | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Lifespan Events

Did you know that you can define some special code that runs before and +after your Kafka application? This code will be executed just once, but +it covers the whole lifespan of your app! 🚀

Lets break it down:

You can define logic (code) that should be executed before the +application starts up. This is like a warm-up for your app, getting it +ready to consume and produce messages.

Similarly, you can define logic (code) that should be executed when the +application is shutting down. This is like a cool-down for your app, +making sure everything is properly closed and cleaned up.

By executing code before consuming and after producing, you cover the +entire lifecycle of your application 🎉

This is super handy for setting up shared resources that are needed +across consumers and producers, like a database connection pool or a +machine learning model. And the best part? You can clean up these +resources when the app is shutting down!

So lets give it a try and see how it can make your Kafka app even more +awesome! 💪

Lifespan example - Iris prediction model

Let’s dive into an example to see how you can leverage the lifecycle +handler to solve a common use case. Imagine that you have some machine +learning models that need to consume incoming messages and produce +response/prediction messages. These models are shared among consumers +and producers, which means you don’t want to load them for every +message.

Here’s where the lifecycle handler comes to the rescue! By loading the +model before the messages are consumed and produced, but only right +before the application starts receiving messages, you can ensure that +the model is ready to use without compromising the performance of your +tests. In the upcoming sections, we’ll walk you through how to +initialize an Iris species prediction model and use it in your developed +application.

Lifespan

You can define this startup and shutdown logic using the lifespan +parameter of the FastKafka app, and an async context manager.

Let’s start with an example and then see it in detail.

We create an async function lifespan() with yield like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from fastkafka import FastKafka

ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

The first thing to notice, is that we are defining an async function +with yield. This is very similar to Dependencies with yield.

The first part of the function, before the yield, will be executed +before the application starts. And the part after the yield will +be executed after the application has finished.

This lifespan will create an iris_prediction model on application +startup and cleanup the references after the app is shutdown.

The lifespan will be passed an KafkaApp reference on startup of your +application, which you can use to reference your application on startup.

For demonstration sake, we also added prints so that when running the +app we can see that our lifespan was called.

Async context manager

Context managers can be used in with blocks, our lifespan, for example +could be used like this:

ml_models = {}
async with lifespan(None):
print(ml_models)

When you create a context manager or an async context manager, what it +does is that, before entering the with block, it will execute the code +before the yield, and after exiting the with block, it will execute +the code after the yield.

If you want to learn more about context managers and contextlib +decorators, please visit Python official +docs

App demo

FastKafka app

Lets now create our application using the created lifespan handler.

Notice how we passed our lifespan handler to the app when constructing it through the lifespan argument.

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Data modeling

Lets model the Iris data for our app:

from pydantic import BaseModel, Field, NonNegativeFloat

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Consumers and producers

Lets create a consumer and producer for our app that will generate +predictions from input iris data.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Final app

The final app looks like this:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from contextlib import asynccontextmanager

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")
ml_models = {}

@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
print("Loading the model!")
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(X, y)
yield
# Clean up the ML models and release the resources

print("Exiting, clearing model dict!")
ml_models.clear()

kafka_brokers = {
"localhost": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local development kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Running the app

Now we can run the app with your custom lifespan handler. Copy the code +above in lifespan_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=localhost lifespan_example:kafka_app

When you run the app, you should see a similar output to the one below:

Recap

In this guide we have defined a lifespan handler and passed to our +FastKafka app.

Some important points are:

  1. Lifespan handler is implemented as +AsyncContextManager
  2. Code before yield in lifespan will be executed before +application startup
  3. Code after yield in lifespan will be executed after +application shutdown
  4. You can pass your lifespan handler to FastKafka app on +initialisation by passing a lifespan argument
+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_06_Benchmarking_FastKafka/index.html b/docs/next/guides/Guide_06_Benchmarking_FastKafka/index.html new file mode 100644 index 0000000..2b393ad --- /dev/null +++ b/docs/next/guides/Guide_06_Benchmarking_FastKafka/index.html @@ -0,0 +1,80 @@ + + + + + +Benchmarking FastKafka app | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Benchmarking FastKafka app

Prerequisites

To benchmark a +FastKafka +project, you will need the following:

  1. A library built with +FastKafka.
  2. A running Kafka instance to benchmark the FastKafka application +against.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

FastKafka +has a decorator for benchmarking which is appropriately called as +benchmark. Let’s edit our application.py file and add the +benchmark decorator to the consumes method.

# content of the "application.py" file with benchmark

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
@kafka_app.benchmark(interval=1, sliding_window_size=5)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Here we are conducting a benchmark of a function that consumes data from +the input_data topic with an interval of 1 second and a sliding window +size of 5.

This benchmark method uses the interval parameter to calculate the +results over a specific time period, and the sliding_window_size +parameter to determine the maximum number of results to use in +calculating the average throughput and standard deviation.

This benchmark is important to ensure that the function is performing +optimally and to identify any areas for improvement.

Starting Kafka

If you already have a Kafka running somewhere, then you can skip this +step.

Please keep in mind that your benchmarking results may be affected by +bottlenecks such as network, CPU cores in the Kafka machine, or even the +Kafka configuration itself.

Installing Java and Kafka

We need a working Kafka instance to benchmark our +FastKafka +app, and to run Kafka we need Java. Thankfully, +FastKafka +comes with a CLI to install both Java and Kafka on our machine.

So, let’s install Java and Kafka by executing the following command.

fastkafka testing install_deps

The above command will extract Kafka scripts at the location +"$HOME/.local/kafka_2.13-3.3.2" on your machine.

Creating configuration for Zookeeper and Kafka

Now we need to start Zookeeper and Kafka separately, and to start +them we need zookeeper.properties and kafka.properties files.

Let’s create a folder inside the folder where Kafka scripts were +extracted and change directory into it.

mkdir $HOME/.local/kafka_2.13-3.3.2/data_dir && cd $HOME/.local/kafka_2.13-3.3.2/data_dir

Let’s create a file called zookeeper.properties and write the +following content to the file:

dataDir=$HOME/.local/kafka_2.13-3.3.2/data_dir/zookeeper
clientPort=2181
maxClientCnxns=0

Similarly, let’s create a file called kafka.properties and write the +following content to the file:

broker.id=0
listeners=PLAINTEXT://:9092

num.network.threads=3
num.io.threads=8
socket.send.buffer.bytes=102400
socket.receive.buffer.bytes=102400
socket.request.max.bytes=104857600

num.partitions=1
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1

log.dirs=$HOME/.local/kafka_2.13-3.3.2/data_dir/kafka_logs
log.flush.interval.messages=10000
log.flush.interval.ms=1000
log.retention.hours=168
log.retention.bytes=1073741824
log.segment.bytes=1073741824
log.retention.check.interval.ms=300000

zookeeper.connect=localhost:2181
zookeeper.connection.timeout.ms=18000

Starting Zookeeper and Kafka

We need two different terminals to run Zookeeper in one and Kafka in +another. Let’s open a new terminal and run the following commands to +start Zookeeper:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./zookeeper-server-start.sh ../data_dir/zookeeper.properties

Once Zookeeper is up and running, open a new terminal and execute the +following commands to start Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-server-start.sh ../data_dir/kafka.properties

Now we have both Zookeeper and Kafka up and running.

Creating topics in Kafka

In a new terminal, please execute the following command to create +necessary topics in Kafka:

export PATH=$PATH:$HOME/.jdk/jdk-11.0.18+10/bin
cd $HOME/.local/kafka_2.13-3.3.2/bin
./kafka-topics.sh --create --topic input_data --partitions 6 --bootstrap-server localhost:9092
./kafka-topics.sh --create --topic predictions --partitions 6 --bootstrap-server localhost:9092

Populating topics with dummy data

To benchmark our +FastKafka +app, we need some data in Kafka topics.

In the same terminal, let’s create some dummy data:

yes '{"sepal_length": 0.7739560486, "sepal_width": 0.8636615789, "petal_length": 0.6122663046, "petal_width": 0.1338914722}' | head -n 1000000 > /tmp/test_data

This command will create a file called test_data in the tmp folder +with one million rows of text. This will act as dummy data to populate +the input_data topic.

Let’s populate the created topic input_data with the dummy data which +we created above:

./kafka-console-producer.sh --bootstrap-server localhost:9092 --topic input_data < /tmp/test_data

Now our topic input_data has one million records/messages in it. If +you want more messages in topic, you can simply execute the above +command again and again.

Benchmarking FastKafka

Once Zookeeper and Kafka are ready, benchmarking +FastKafka +app is as simple as running the fastkafka run command:

fastkafka run --num-workers 1 --kafka-broker localhost application:kafka_app

This command will start the +FastKafka +app and begin consuming messages from Kafka, which we spun up earlier. +Additionally, the same command will output all of the benchmark +throughputs based on the interval and sliding_window_size values.

The output for the fastkafka run command is:

[385814]: 23-04-07 10:49:18.380 [INFO] application: Current group id is ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[385814]: 23-04-07 10:49:18.382 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[385814]: 23-04-07 10:49:18.387 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'group_id': 'ZDGTBVWVBBDMZCW', 'auto_offset_reset': 'earliest', 'bootstrap_servers': 'localh
ost:9092', 'max_poll_records': 100}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[385814]: 23-04-07 10:49:18.390 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[385814]: 23-04-07 10:49:18.390 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Discovered coordinator 0 for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: Revoking previously assigned partitions set() for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:18.395 [INFO] aiokafka.consumer.group_coordinator: (Re-)joining group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Joined group 'ZDGTBVWVBBDMZCW' (generation 1) with member_id aiokafka-0.8.0-b1f06560-6983-4d5e-a9af-8084e0e652cc
[385814]: 23-04-07 10:49:21.396 [INFO] aiokafka.consumer.group_coordinator: Elected group leader -- performing partition assignments using roundrobin
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Successfully synced group ZDGTBVWVBBDMZCW with generation 1
[385814]: 23-04-07 10:49:21.397 [INFO] aiokafka.consumer.group_coordinator: Setting newly assigned partitions {TopicPartition(topic='input_data', partition=0), TopicPartition(topic='input_data', partition=1), TopicPartition(topic='input_data', partition
=2), TopicPartition(topic='input_data', partition=3)} for group ZDGTBVWVBBDMZCW
[385814]: 23-04-07 10:49:22.409 [INFO] fastkafka.benchmark: Throughput = 93,598, Avg throughput = 93,598 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:23.409 [INFO] fastkafka.benchmark: Throughput = 91,847, Avg throughput = 92,723 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:24.409 [INFO] fastkafka.benchmark: Throughput = 92,948, Avg throughput = 92,798 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:25.409 [INFO] fastkafka.benchmark: Throughput = 93,227, Avg throughput = 92,905 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:26.409 [INFO] fastkafka.benchmark: Throughput = 93,553, Avg throughput = 93,035 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:27.409 [INFO] fastkafka.benchmark: Throughput = 92,699, Avg throughput = 92,855 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:28.409 [INFO] fastkafka.benchmark: Throughput = 92,716, Avg throughput = 93,029 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:29.409 [INFO] fastkafka.benchmark: Throughput = 92,897, Avg throughput = 93,019 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:30.409 [INFO] fastkafka.benchmark: Throughput = 92,854, Avg throughput = 92,944 - For application.on_input_data(interval=1,sliding_window_size=5)
[385814]: 23-04-07 10:49:31.410 [INFO] fastkafka.benchmark: Throughput = 92,672, Avg throughput = 92,768 - For application.on_input_data(interval=1,sliding_window_size=5)

Based on the output, when using 1 worker, our +FastKafka +app achieved a throughput of 93k messages per second and an +average throughput of 93k messages per second.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html b/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html new file mode 100644 index 0000000..9db612e --- /dev/null +++ b/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/index.html @@ -0,0 +1,150 @@ + + + + + +Encoding and Decoding Kafka Messages with FastKafka | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Encoding and Decoding Kafka Messages with FastKafka

Prerequisites

  1. A basic knowledge of +FastKafka +is needed to proceed with this guide. If you are not familiar with +FastKafka, +please go through the tutorial first.
  2. FastKafka +with its dependencies installed is needed. Please install +FastKafka +using the command - pip install fastkafka

Ways to Encode and Decode Messages with FastKafka

In Python, by default, we send Kafka messages as bytes. Even if our +message is a string, we convert it to bytes and then send it to a Kafka +topic. Similarly, while consuming messages, we consume them as bytes and +then convert them to strings.

In FastKafka, we specify message schema using Pydantic models as +mentioned in tutorial:

# Define Pydantic models for Kafka messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Then, we send and receive messages as instances of Pydantic models which +we defined. So, FastKafka needs a way to encode/decode to these Pydantic +model messages to bytes in order to send/receive messages to/from Kafka +topics.

The @consumes and @produces methods of FastKafka accept a parameter +called decoder/encoder to decode/encode Kafka messages. FastKafka +provides three ways to encode and decode messages:

  1. json - This is the default encoder/decoder option in FastKafka. +While producing, this option converts our instance of Pydantic model +messages to a JSON string and then converts it to bytes before +sending it to the topic. While consuming, it converts bytes to a +JSON string and then constructs an instance of Pydantic model from +the JSON string.
  2. avro - This option uses Avro encoding/decoding to convert instances +of Pydantic model messages to bytes while producing, and while +consuming, it constructs an instance of Pydantic model from bytes.
  3. custom encoder/decoder - If you are not happy with the json or avro +encoder/decoder options, you can write your own encoder/decoder +functions and use them to encode/decode Pydantic messages.

1. Json encoder and decoder

The default option in FastKafka is json encoder/decoder. This option, +while producing, converts our instance of pydantic model messages to +json string and then converts to bytes before sending it to the topics. +While consuming it converts bytes to json string and then constructs +instance of pydantic model from json string.

We can use the application from tutorial as +is, and it will use the json encoder/decoder by default. But, for +clarity, let’s modify it to explicitly accept the ‘json’ encoder/decoder +parameter:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder="json")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="json")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

In the above code, the @kafka_app.consumes decorator sets up a +consumer for the “input_data" topic, using the ‘json’ decoder to convert +the message payload to an instance of IrisInputData. The +@kafka_app.produces decorator sets up a producer for the “predictions" +topic, using the ‘json’ encoder to convert the instance of +IrisPrediction to message payload.

2. Avro encoder and decoder

What is Avro?

Avro is a row-oriented remote procedure call and data serialization +framework developed within Apache’s Hadoop project. It uses JSON for +defining data types and protocols, and serializes data in a compact +binary format. To learn more about the Apache Avro, please check out the +docs.

Installing FastKafka with Avro dependencies

FastKafka +with dependencies for Apache Avro installed is needed to use avro +encoder/decoder. Please install +FastKafka +with Avro support using the command - pip install fastkafka[avro]

Defining Avro Schema Using Pydantic Models

By default, you can use Pydantic model to define your message schemas. +FastKafka internally takes care of encoding and decoding avro messages, +based on the Pydantic models.

So, similar to the tutorial, the message schema will +remain as it is.

# Define Pydantic models for Avro messages
from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

No need to change anything to support avro. You can use existing +Pydantic models as is.

Reusing existing avro schema

If you are using some other library to send and receive avro encoded +messages, it is highly likely that you already have an Avro schema +defined.

Building pydantic models from avro schema dictionary

Let’s modify the above example and let’s assume we have schemas already +for IrisInputData and IrisPrediction which will look like below:

iris_input_data_schema = {
"type": "record",
"namespace": "IrisInputData",
"name": "IrisInputData",
"fields": [
{"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
{"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
{"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
{"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
],
}
iris_prediction_schema = {
"type": "record",
"namespace": "IrisPrediction",
"name": "IrisPrediction",
"fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}

We can easily construct pydantic models from avro schema using +avsc_to_pydantic +function which is included as part of +FastKafka +itself.

from fastkafka.encoder import avsc_to_pydantic

IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.model_fields)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.model_fields)

The above code will convert avro schema to pydantic models and will +print pydantic models’ fields. The output of the above is:

{'sepal_length': ModelField(name='sepal_length', type=float, required=True),
'sepal_width': ModelField(name='sepal_width', type=float, required=True),
'petal_length': ModelField(name='petal_length', type=float, required=True),
'petal_width': ModelField(name='petal_width', type=float, required=True)}

{'species': ModelField(name='species', type=str, required=True)}

This is exactly same as manually defining the pydantic models ourselves. +You don’t have to worry about not making any mistakes while converting +avro schema to pydantic models manually. You can easily and +automatically accomplish it by using +avsc_to_pydantic +function as demonstrated above.

Building pydantic models from .avsc file

Not all cases will have avro schema conveniently defined as a python +dictionary. You may have it stored as the proprietary .avsc files in +filesystem. Let’s see how to convert those .avsc files to pydantic +models.

Let’s assume our avro files are stored in files called +iris_input_data_schema.avsc and iris_prediction_schema.avsc. In that +case, following code converts the schema to pydantic models:

import json
from fastkafka.encoder import avsc_to_pydantic


with open("iris_input_data_schema.avsc", "rb") as f:
iris_input_data_schema = json.load(f)

with open("iris_prediction_schema.avsc", "rb") as f:
iris_prediction_schema = json.load(f)


IrisInputData = avsc_to_pydantic(iris_input_data_schema)
print(IrisInputData.model_fields)

IrisPrediction = avsc_to_pydantic(iris_prediction_schema)
print(IrisPrediction.model_fields)

Consume/Produce avro messages with FastKafka

FastKafka +provides @consumes and @produces methods to consume/produce +messages from/to a Kafka topic. This is explained in +tutorial.

The @consumes and @produces methods accept a parameter called +decoder/encoder to decode/encode avro messages.

@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

In the above example, in @consumes and @produces methods, we +explicitly instruct FastKafka to decode and encode messages using +the avro decoder/encoder instead of the default json +decoder/encoder.

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use avro to decode and +encode messages:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


iris_input_data_schema = {
"type": "record",
"namespace": "IrisInputData",
"name": "IrisInputData",
"fields": [
{"doc": "Sepal length in cm", "type": "double", "name": "sepal_length"},
{"doc": "Sepal width in cm", "type": "double", "name": "sepal_width"},
{"doc": "Petal length in cm", "type": "double", "name": "petal_length"},
{"doc": "Petal width in cm", "type": "double", "name": "petal_width"},
],
}
iris_prediction_schema = {
"type": "record",
"namespace": "IrisPrediction",
"name": "IrisPrediction",
"fields": [{"doc": "Predicted species", "type": "string", "name": "species"}],
}
# Or load schema from avsc files

from fastkafka.encoder import avsc_to_pydantic

IrisInputData = avsc_to_pydantic(iris_input_data_schema)
IrisPrediction = avsc_to_pydantic(iris_prediction_schema)


from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder="avro")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder="avro")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

The above code is a sample implementation of using FastKafka to consume +and produce Avro-encoded messages from/to a Kafka topic. The code +defines two Avro schemas for the input data and the prediction result. +It then uses the +avsc_to_pydantic +function from the FastKafka library to convert the Avro schema into +Pydantic models, which will be used to decode and encode Avro messages.

The +FastKafka +class is then instantiated with the broker details, and two functions +decorated with @kafka_app.consumes and @kafka_app.produces are +defined to consume messages from the "input_data" topic and produce +messages to the "predictions" topic, respectively. The functions use +the decoder="avro" and encoder="avro" parameters to decode and encode +the Avro messages.

In summary, the above code demonstrates a straightforward way to use +Avro-encoded messages with FastKafka to build a message processing +pipeline.

3. Custom encoder and decoder

If you are not happy with the json or avro encoder/decoder options, you +can write your own encoder/decoder functions and use them to +encode/decode Pydantic messages.

Writing a custom encoder and decoder

In this section, let’s see how to write a custom encoder and decoder +which obfuscates kafka message with simple +ROT13 cipher.

import codecs
import json
from typing import Any, Type


def custom_encoder(msg: BaseModel) -> bytes:
msg_str = msg.json()
obfuscated = codecs.encode(msg_str, 'rot13')
raw_bytes = obfuscated.encode("utf-8")
return raw_bytes

def custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:
obfuscated = raw_msg.decode("utf-8")
msg_str = codecs.decode(obfuscated, 'rot13')
msg_dict = json.loads(msg_str)
return cls(**msg_dict)

The above code defines two custom functions for encoding and decoding +messages in a Kafka application using the FastKafka library.

The encoding function, custom_encoder(), takes a message msg which +is an instance of a Pydantic model, converts it to a JSON string using +the json() method, obfuscates the resulting string using the ROT13 +algorithm from the codecs module, and finally encodes the obfuscated +string as raw bytes using the UTF-8 encoding.

The decoding function, custom_decoder(), takes a raw message raw_msg +in bytes format, a Pydantic class to construct instance with cls +parameter. It first decodes the raw message from UTF-8 encoding, then +uses the ROT13 algorithm to de-obfuscate the string. Finally, it loads +the resulting JSON string using the json.loads() method and returns a +new instance of the specified cls class initialized with the decoded +dictionary.

These functions can be used with FastKafka’s encoder and decoder +parameters to customize the serialization and deserialization of +messages in Kafka topics.

Let’s test the above code

i = IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

encoded = custom_encoder(i)
display(encoded)

decoded = custom_decoder(encoded, IrisInputData)
display(decoded)

This will result in following output

b'{"frcny_yratgu": 0.5, "frcny_jvqgu": 0.5, "crgny_yratgu": 0.5, "crgny_jvqgu": 0.5}'

IrisInputData(sepal_length=0.5, sepal_width=0.5, petal_length=0.5, petal_width=0.5)

Assembling it all together

Let’s rewrite the sample code found in +tutorial to use our custom decoder and +encoder functions:

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")


import codecs
import json
from typing import Any, Type


def custom_encoder(msg: BaseModel) -> bytes:
msg_str = msg.json()
obfuscated = codecs.encode(msg_str, 'rot13')
raw_bytes = obfuscated.encode("utf-8")
return raw_bytes

def custom_decoder(raw_msg: bytes, cls: Type[BaseModel]) -> Any:
obfuscated = raw_msg.decode("utf-8")
msg_str = codecs.decode(obfuscated, 'rot13')
msg_dict = json.loads(msg_str)
return cls(**msg_dict)


from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", decoder=custom_decoder)
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions", encoder=custom_encoder)
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

This code defines a custom encoder and decoder functions for encoding +and decoding messages sent through a Kafka messaging system.

The custom encoder function takes a message represented as a +BaseModel and encodes it as bytes by first converting it to a JSON +string and then obfuscating it using the ROT13 encoding. The obfuscated +message is then converted to bytes using UTF-8 encoding and returned.

The custom decoder function takes in the bytes representing an +obfuscated message, decodes it using UTF-8 encoding, then decodes the +ROT13 obfuscation, and finally loads it as a dictionary using the json +module. This dictionary is then converted to a BaseModel instance +using the cls parameter.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_11_Consumes_Basics/index.html b/docs/next/guides/Guide_11_Consumes_Basics/index.html new file mode 100644 index 0000000..4ee7b28 --- /dev/null +++ b/docs/next/guides/Guide_11_Consumes_Basics/index.html @@ -0,0 +1,90 @@ + + + + + +@consumes basics | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

@consumes basics

You can use @consumes decorator to consume messages from Kafka topics.

In this guide we will create a simple FastKafka app that will consume +HelloWorld messages from hello_world topic.

Import FastKafka

To use the @consumes decorator, first we need to import the base +FastKafka app to create our application.

from fastkafka import FastKafka

In this demo we will log the messages to the output so that we can +inspect and verify that our app is consuming properly. For that we need +to import the logger.

from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

Define the structure of the messages

Next, you need to define the structure of the messages you want to +consume from the topic using pydantic. For +the guide we’ll stick to something basic, but you are free to define any +complex message structure you wish in your project, just make sure it +can be JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field
class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a consumer function and decorate it with @consumes

Let’s create a consumer function that will consume HelloWorld messages +from hello_world topic and log them.

@app.consumes()
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

The function decorated with the @consumes decorator will be called +when a message is produced to Kafka.

The message will then be injected into the typed msg argument of the +function and its type will be used to parse the message.

In this example case, when the message is sent into a hello_world +topic, it will be parsed into a HelloWorld class and on_hello_world +function will be called with the parsed class as msg argument value.

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)
@app.consumes()
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Run the app

Now we can run the app. Copy the code above in consumer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

After running the command, you should see this output in your terminal:

[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[14442]: 23-06-15 07:16:00.564 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[14442]: 23-06-15 07:16:00.577 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[14442]: 23-06-15 07:16:00.577 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[14442]: 23-06-15 07:16:00.585 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
Starting process cleanup, this may take a few seconds...
23-06-15 07:16:04.626 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 14442...
[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[14442]: 23-06-15 07:16:05.735 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-15 07:16:05.853 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 14442 terminated.

Send the message to kafka topic

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo { \"msg\": \"Hello world\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>
[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[15588]: 23-06-15 07:16:15.282 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[15588]: 23-06-15 07:16:15.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[15588]: 23-06-15 07:16:15.294 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[15588]: 23-06-15 07:16:15.295 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[15588]: 23-06-15 07:16:15.295 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[15588]: 23-06-15 07:16:15.302 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
[15588]: 23-06-15 07:16:25.867 [INFO] consumer_example: Got msg: msg='Hello world'
Starting process cleanup, this may take a few seconds...
23-06-15 07:16:34.168 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 15588...
[15588]: 23-06-15 07:16:35.358 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[15588]: 23-06-15 07:16:35.359 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-15 07:16:35.475 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 15588 terminated.

You should see the “Got msg: msg='Hello world'" being logged by your +consumer.

Choosing a topic

You probably noticed that you didn’t define which topic you are +receiving the message from, this is because the @consumes decorator +determines the topic by default from your function name. The decorator +will take your function name and strip the default “on_" prefix from it +and use the rest as the topic name. In this example case, the topic is +hello_world.

You can choose your custom prefix by defining the prefix parameter in +consumes decorator, like this:

@app.consumes(prefix="read_from_")
async def read_from_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Also, you can define the topic name completely by defining the topic parameter in the consumes decorator, like this:

@app.consumes(topic="my_special_topic")
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

Message data

The message received from Kafka is translated from its binary JSON representation into the class defined by the typing of the msg parameter in the function decorated by the @consumes decorator.

In this example case, the message will be parsed into a HelloWorld +class.

Message metadata

If you need any of Kafka message metadata such as timestamp, partition +or headers you can access the metadata by adding a EventMetadata typed +argument to your consumes function and the metadata from the incoming +message will be automatically injected when calling the consumes +function.

Let’s demonstrate that.

Create a consumer function with metadata

The only difference from the original basic consume function is that we +are now passing the meta: EventMetadata argument to the function. The +@consumes decorator will register that and, when a message is +consumed, it will also pass the metadata to your function. Now you can +use the metadata in your consume function. Lets log it to see what it +contains.

First, we need to import the EventMetadata

from fastkafka import EventMetadata

Now we can add the meta argument to our consuming function.

@app.consumes()
async def on_hello_world(msg: HelloWorld, meta: EventMetadata):
logger.info(f"Got metadata: {meta}")

Your final app should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field
from fastkafka import EventMetadata
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)
class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)
kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)
@app.consumes()
async def on_hello_world(msg: HelloWorld, meta: EventMetadata):
logger.info(f"Got metadata: {meta}")

Now lets run the app and send a message to the broker to see the logged +message metadata.

You should see a similar log as the one below and the metadata being +logged in your app.

[20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[20050]: 23-06-15 07:18:55.661 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[20050]: 23-06-15 07:18:55.675 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[20050]: 23-06-15 07:18:55.675 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[20050]: 23-06-15 07:18:55.682 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
[20050]: 23-06-15 07:19:06.337 [INFO] consumer_example: Got metadata: EventMetadata(topic='hello_world', partition=0, offset=0, timestamp=1686813546255, timestamp_type=0, key=None, value=b'{ "msg": "Hello world" }', checksum=None, serialized_key_size=-1, serialized_value_size=24, headers=())
Starting process cleanup, this may take a few seconds...
23-06-15 07:19:14.547 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 20050...
[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[20050]: 23-06-15 07:19:15.630 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-15 07:19:15.742 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 20050 terminated.

As you can see in the log, from the metadata you now have the +information about the partition, offset, timestamp, key and headers. +🎉

Dealing with high latency consuming functions

If your functions have high latency due to, for example, lengthy database calls you will notice a big decrease in performance. This is due to the issue of how the consumes decorator executes your consume functions when consuming events. By default, the consume function will run the consuming functions for one topic sequentially, this is the most straightforward approach and results with the least amount of overhead.

But, to handle those high latency tasks and run them in parallel, +FastKafka has a +DynamicTaskExecutor +prepared for your consumers. This executor comes with additional +overhead, so use it only when you need to handle high latency functions.

Lets demonstrate how to use it.

To your consumes decorator, add an executor option and set it to +"DynamicTaskExecutor", this will enable the consumer to handle high +latency functions effectively.

Your consuming function should now look like this:

@app.consumes(executor="DynamicTaskExecutor")
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

And the complete app should now look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.consumes(executor="DynamicTaskExecutor")
async def on_hello_world(msg: HelloWorld):
logger.info(f"Got msg: {msg}")

You can now run your app using the CLI commands described in this guide.

Lets send a HelloWorld message to the hello_world topic and check if +our consumer kafka application has logged the received message. In your +terminal, run:

echo { \"msg\": \"Hello world\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see a similar log as the one below.

[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[21539]: 23-06-15 07:19:25.135 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:9092'}
[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'hello_world'})
[21539]: 23-06-15 07:19:25.147 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'hello_world'}
[21539]: 23-06-15 07:19:25.147 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[21539]: 23-06-15 07:19:25.154 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'hello_world': 1}.
[21539]: 23-06-15 07:19:35.512 [INFO] consumer_example: Got msg: msg='Hello world'
Starting process cleanup, this may take a few seconds...
23-06-15 07:19:44.023 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 21539...
[21539]: 23-06-15 07:19:45.202 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[21539]: 23-06-15 07:19:45.203 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-15 07:19:45.313 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 21539 terminated.

Inside the log, you should see the “Got msg: msg='Hello world'" being +logged by your consumer.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_12_Batch_Consuming/index.html b/docs/next/guides/Guide_12_Batch_Consuming/index.html new file mode 100644 index 0000000..96e26ce --- /dev/null +++ b/docs/next/guides/Guide_12_Batch_Consuming/index.html @@ -0,0 +1,47 @@ + + + + + +Batch consuming | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Batch consuming

If you want to consume data in batches @consumes decorator makes that +possible for you. By typing a consumed msg object as a list of +messages the consumer will call your consuming function with a batch of +messages consumed from a single partition. Let’s demonstrate that now.

Consume function with batching

To consume messages in batches, you need to wrap your message type into a list and the @consumes decorator will take care of the rest for you. Your consumes function will be called with batches grouped by partition now.

@app.consumes(auto_offset_reset="earliest")
async def on_hello_world(msg: List[HelloWorld]):
logger.info(f"Got msg batch: {msg}")

App example

We will modify the app example from @consumes +basics guide to consume +HelloWorld messages batch. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


import asyncio
from typing import List
from pydantic import BaseModel, Field

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.consumes(auto_offset_reset="earliest")
async def on_hello_world(msg: List[HelloWorld]):
logger.info(f"Got msg batch: {msg}")

Send the messages to kafka topic

Lets send a couple of HelloWorld messages to the hello_world topic +and check if our consumer kafka application has logged the received +messages batch. In your terminal, run the following command at least two +times to create multiple messages in your kafka queue:

echo { \"msg\": \"Hello world\" } | kafka-console-producer.sh --topic=hello_world --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

Now we can run the app. Copy the code of the example app in +consumer_example.py and run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker consumer_example:app

You should see your Kafka messages being logged in batches by your consumer.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_21_Produces_Basics/index.html b/docs/next/guides/Guide_21_Produces_Basics/index.html new file mode 100644 index 0000000..68f3b83 --- /dev/null +++ b/docs/next/guides/Guide_21_Produces_Basics/index.html @@ -0,0 +1,62 @@ + + + + + +@produces basics | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

@produces basics

You can use @produces decorator to produce messages to Kafka topics.

In this guide we will create a simple FastKafka app that will produce +hello world messages to hello_world topic.

Import FastKafka

To use the @produces decorator, first we need to import the base FastKafka app to create our application.

from fastkafka import FastKafka

Define the structure of the messages

Next, you need to define the structure of the messages you want to send +to the topic using pydantic. For the guide +we’ll stick to something basic, but you are free to define any complex +message structure you wish in your project, just make sure it can be +JSON encoded.

Let’s import BaseModel and Field from pydantic and create a simple +HelloWorld class containing one string parameter msg

from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

Create a base FastKafka app

Now we will create and define a base FastKafka app, replace the +<url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values of your +Kafka bootstrap server



kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

Create a producer function and decorate it with @produces

Let’s create a producer function that will produce HelloWorld messages +to hello_world topic:


@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Now you can call your defined function as any normal python function in +your code. The side effect of calling the function will be that the +value you are returning will also be sent to a kafka topic.

By default, the topic is determined from your function name, the “to_" prefix is stripped and what is left over is used as a topic name. In this case, that is hello_world.

Instruct the app to start sending HelloWorld messages

Let’s use @run_in_background decorator to instruct our app to send +HelloWorld messages to hello_world topic every second.


import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Final app

Your app code should look like this:

from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)


kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.produces()
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

script_file = "producer_example.py"
cmd = "fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app"
md(
f"Now we can run the app. Copy the code above in producer_example.py and run it by running\n```shell\n{cmd}\n```"
)

Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[84645]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[84645]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[84645]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[84645]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
[84645]: [WARNING] aiokafka.cluster: Topic hello_world is not available during auto-create initialization
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 84645...
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[84645]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 84645 terminated.

Check if the message was sent to the Kafka topic

Lets check the topic and see if there is a “Hello world!" message in the +hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<addr_of_your_kafka_bootstrap_server>

You should see the {“msg": “Hello world!"} messages in your topic.

Choosing a topic

You probably noticed that you didn’t define which topic you are sending +the message to, this is because the @produces decorator determines the +topic by default from your function name. The decorator will take your +function name and strip the default “to_" prefix from it and use the +rest as the topic name. In this example case, the topic is +hello_world.

!!! warning "New topics"

Kafka producers and application startup will fail if the topics you are producing to don't yet exist. Before running the app, make sure that the topics are created.

You can choose your custom prefix by defining the prefix parameter in +produces decorator, like this:


@app.produces(prefix="send_to_")
async def send_to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Also, you can define the topic name completely by defining the topic parameter in the produces decorator, like this:


@app.produces(topic="my_special_topic")
async def to_hello_world(msg: str) -> HelloWorld:
return HelloWorld(msg=msg)

Message data

The return value from your function will be translated to a JSON string and then to bytes and sent to the defined Kafka topic. The typing of the return value is used for generating the documentation for your Kafka app.

In this example case, the return value is the HelloWorld class which will be translated into a JSON formatted string and then to bytes. The translated data will then be sent to Kafka in the form of: b'{"msg":"Hello world!"}'

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_22_Partition_Keys/index.html b/docs/next/guides/Guide_22_Partition_Keys/index.html new file mode 100644 index 0000000..374d449 --- /dev/null +++ b/docs/next/guides/Guide_22_Partition_Keys/index.html @@ -0,0 +1,55 @@ + + + + + +Defining a partition key | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Defining a partition key

Partition keys are used in Apache Kafka to determine which partition a +message should be written to. This ensures that related messages are +kept together in the same partition, which can be useful for ensuring +order or for grouping related messages together for efficient +processing. Additionally, partitioning data across multiple partitions +allows Kafka to distribute load across multiple brokers and scale +horizontally, while replicating data across multiple brokers provides +fault tolerance.

You can define your partition keys when using the @produces decorator, +this guide will demonstrate to you this feature.

Return a key from the producing function

To define a key for the message that you want to produce to Kafka topic, +you need to wrap the response into +KafkaEvent +class and set the key value. Check the example below:


from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

In the example, we want to return the HelloWorld message class with +the key defined as my_key. So, we wrap the message and key into a +KafkaEvent class and return it as such.

While generating the documentation, the +KafkaEvent +class will be unwrapped and the HelloWorld class will be documented in +the definition of message type, same way if you didn’t use the key.

!!! info "Which key to choose?"

Although we have defined a fixed key in this example, nothing is stopping you from calculating a key beforehand and passing it in, or using the message parts for key calculation. Just make sure that the key is in `bytes` format when you wrap it in `KafkaEvent`.

App example

We will modify the app example from @producer basics guide to return +the HelloWorld with our key. The final app will look like this (make +sure you replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msg: str) -> KafkaEvent[HelloWorld]:
return KafkaEvent(HelloWorld(msg=msg), key=b"my_key")

import asyncio

@app.run_in_background()
async def hello_every_second():
while(True):
await to_hello_world(msg="Hello world!")
await asyncio.sleep(1)

Run the app

Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[347835]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'hello_every_second' as background task
[347835]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[347835]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[347835]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'hello_every_second'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 347835...
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'hello_every_second'
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'hello_every_second' to finish
[347835]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'hello_every_second'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 347835 terminated.

Check if the message was sent to the Kafka topic with the desired key

Lets check the topic and see if there is a “Hello world!" message in the +hello_world topic with the defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the my_key {“msg": “Hello world!"} messages in your +topic appearing, the my_key part of the message is the key that we +defined in our producing function.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_23_Batch_Producing/index.html b/docs/next/guides/Guide_23_Batch_Producing/index.html new file mode 100644 index 0000000..8fc9add --- /dev/null +++ b/docs/next/guides/Guide_23_Batch_Producing/index.html @@ -0,0 +1,55 @@ + + + + + +Batch producing | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Batch producing

If you want to send your data in batches @produces decorator makes +that possible for you. By returning a list of messages you want to +send in a batch the producer will collect the messages and send them in +a batch to a Kafka broker.

This guide will demonstrate how to use this feature.

Return a batch from the producing function

To define a batch that you want to produce to Kafka topic, you need to +return the List of the messages that you want to be batched from your +producing function.


from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

In the example, we want to return the HelloWorld message class batch +that is created from a list of msgs we passed into our producing +function.

Let’s also prepare a background task that will send a batch of “hello world" messages when the app starts.


@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

App example

We will modify the app example from @producer +basics guide to return the +HelloWorld batch. The final app will look like this (make sure you +replace the <url_of_your_kafka_bootstrap_server> and +<port_of_your_kafka_bootstrap_server> with the actual values):


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List

@app.produces()
async def to_hello_world(msgs: List[str]) -> List[HelloWorld]:
return [HelloWorld(msg=msg) for msg in msgs]

Run the app

Now we can run the app. Copy the code above in producer_example.py and +run it by running

fastkafka run --num-workers=1 --kafka-broker=demo_broker producer_example:app

After running the command, you should see this output in your terminal:

[46480]: [INFO] fastkafka._application.app: run_in_background() : Adding function 'prepare_and_send_hello_batch' as background task
[46480]: [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to '127.0.0.1:9092'
[46480]: [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': '127.0.0.1:9092'}'
[46480]: [INFO] fastkafka._application.app: _populate_bg_tasks() : Starting background task 'prepare_and_send_hello_batch'
Starting process cleanup, this may take a few seconds...
[INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 46480...
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Cancelling background task 'prepare_and_send_hello_batch'
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Waiting for background task 'prepare_and_send_hello_batch' to finish
[46480]: [INFO] fastkafka._application.app: _shutdown_bg_tasks() : Execution finished for background task 'prepare_and_send_hello_batch'
[INFO] fastkafka._server: terminate_asyncio_process(): Process 46480 terminated.

Check if the batch was sent to the Kafka topic

Let's check the topic and see if there are "Hello world" messages in the hello_world topic. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages in your topic.

Batch key

To define a key for your batch like in the Defining a partition key guide, you can wrap the return value in a KafkaEvent class. To learn more about defining a partition key and the KafkaEvent class, please have a look at the Defining a partition key guide.

Let’s demonstrate that.

To define a key, we just need to modify our producing function, like +this:


from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Now our app looks like this:


import asyncio
from fastkafka import FastKafka
from pydantic import BaseModel, Field

class HelloWorld(BaseModel):
msg: str = Field(
...,
example="Hello",
description="Demo hello world message",
)

kafka_brokers = {
"demo_broker": {
"url": "<url_of_your_kafka_bootstrap_server>",
"description": "local demo kafka broker",
"port": "<port_of_your_kafka_bootstrap_server>",
}
}

app = FastKafka(kafka_brokers=kafka_brokers)

@app.run_in_background()
async def prepare_and_send_hello_batch():
msgs=[f"Hello world {i}" for i in range(10)]
await to_hello_world(msgs)

from typing import List
from fastkafka import KafkaEvent

@app.produces()
async def to_hello_world(msgs: List[str]) -> KafkaEvent[List[HelloWorld]]:
return KafkaEvent([HelloWorld(msg=msg) for msg in msgs], key=b"my_key")

Check if the batch was sent to the Kafka topic

Let's check the topic and see if there are "Hello world" messages in the hello_world topic, containing a defined key. In your terminal run:

kafka-console-consumer.sh --topic=hello_world --property print.key=true --from-beginning --bootstrap-server=<address_of_your_kafka_bootstrap_server>

You should see the batch of messages with the defined key in your topic.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html b/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html new file mode 100644 index 0000000..3317e1c --- /dev/null +++ b/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters/index.html @@ -0,0 +1,155 @@ + + + + + +Using multiple Kafka clusters | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Using multiple Kafka clusters

Ready to take your FastKafka app to the next level? This guide shows you +how to connect to multiple Kafka clusters effortlessly. Consolidate +topics and produce messages across clusters like a pro. Unleash the full +potential of your Kafka-powered app with FastKafka. Let’s dive in and +elevate your application’s capabilities!

Test message

To showcase the functionalities of FastKafka and illustrate the concepts +discussed, we can use a simple test message called TestMsg. Here’s the +definition of the TestMsg class:

class TestMsg(BaseModel):
msg: str = Field(...)

Defining multiple broker configurations

When building a FastKafka application, you may need to consume messages +from multiple Kafka clusters, each with its own set of broker +configurations. FastKafka provides the flexibility to define different +broker clusters using the brokers argument in the consumes decorator. +Let’s explore an example code snippet

from pydantic import BaseModel, Field

from fastkafka import FastKafka


class TestMsg(BaseModel):
msg: str = Field(...)


kafka_brokers_1 = dict(
development=dict(url="dev.server_1", port=9092),
production=dict(url="prod.server_1", port=9092),
)
kafka_brokers_2 = dict(
development=dict(url="dev.server_2", port=9092),
production=dict(url="prod.server_2", port=9092),
)

app = FastKafka(kafka_brokers=kafka_brokers_1, bootstrap_servers_id="development")


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_1(msg: TestMsg):
print(f"Received on s1: {msg=}")
await to_predictions_1(msg)


@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def on_preprocessed_signals_2(msg: TestMsg):
print(f"Received on s2: {msg=}")
await to_predictions_2(msg)


@app.produces(topic="predictions")
async def to_predictions_1(msg: TestMsg) -> TestMsg:
return msg


@app.produces(topic="predictions", brokers=kafka_brokers_2)
async def to_predictions_2(msg: TestMsg) -> TestMsg:
return msg

In this example, the application has two consumes endpoints, both of +which will consume events from preprocessed_signals topic. +on_preprocessed_signals_1 will consume events from kafka_brokers_1 +configuration and on_preprocessed_signals_2 will consume events from +kafka_brokers_2 configuration. When producing, to_predictions_1 will +produce to predictions topic on kafka_brokers_1 cluster and +to_predictions_2 will produce to predictions topic on +kafka_brokers_2 cluster.

How it works

The kafka_brokers_1 configuration represents the primary cluster, +while kafka_brokers_2 serves as an alternative cluster specified in +the decorator.

Using the FastKafka class, the app object is initialized with the +primary broker configuration (kafka_brokers_1). By default, the +@app.consumes decorator without the brokers argument consumes messages +from the preprocessed_signals topic on kafka_brokers_1.

To consume messages from a different cluster, the @app.consumes +decorator includes the brokers argument. This allows explicit +specification of the broker cluster in the on_preprocessed_signals_2 +function, enabling consumption from the same topic but using the +kafka_brokers_2 configuration.

The brokers argument can also be used in the @app.produces decorator to +define multiple broker clusters for message production.

It’s important to ensure that all broker configurations have the same +required settings as the primary cluster to ensure consistent behavior.

Testing the application

To test our FastKafka 'mirroring' application, we can use our testing framework. Let's take a look at how it's done:

from fastkafka.testing import Tester

async with Tester(app) as tester:
# Send TestMsg to topic/broker pair on_preprocessed_signals_1 is consuming from
await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal_s1"))
# Assert on_preprocessed_signals_1 consumed sent message
await app.awaited_mocks.on_preprocessed_signals_1.assert_called_with(
TestMsg(msg="signal_s1"), timeout=5
)
# Assert app has produced a prediction
await tester.mirrors[app.to_predictions_1].assert_called_with(
TestMsg(msg="signal_s1"), timeout=5
)

# Send TestMsg to topic/broker pair on_preprocessed_signals_2 is consuming from
await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal_s2"))
# Assert on_preprocessed_signals_2 consumed sent message
await app.awaited_mocks.on_preprocessed_signals_2.assert_called_with(
TestMsg(msg="signal_s2"), timeout=5
)
# Assert app has produced a prediction
await tester.mirrors[app.to_predictions_2].assert_called_with(
TestMsg(msg="signal_s2"), timeout=5
)
23-06-23 12:15:51.156 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-06-23 12:15:51.157 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-06-23 12:15:51.157 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'
23-06-23 12:15:51.158 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:15:51.158 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'
23-06-23 12:15:51.159 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:15:51.178 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_1:9092'}'
23-06-23 12:15:51.178 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:15:51.179 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'dev.server_2:9092'}'
23-06-23 12:15:51.180 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:15:51.180 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}
23-06-23 12:15:51.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:15:51.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:15:51.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:15:51.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:15:51.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:15:51.187 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}
23-06-23 12:15:51.187 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:15:51.188 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:15:51.188 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:15:51.189 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:15:51.189 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_1:9092'}
23-06-23 12:15:51.190 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:15:51.190 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:15:51.191 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:15:51.191 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:15:51.192 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'dev.server_2:9092'}
23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:15:51.193 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:15:51.193 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:15:51.194 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:15:51.194 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
Received on s1: msg=TestMsg(msg='signal_s1')
Received on s2: msg=TestMsg(msg='signal_s2')
23-06-23 12:15:56.181 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:15:56.181 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:15:56.182 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:15:56.182 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:15:56.183 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:15:56.183 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:15:56.184 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:15:56.184 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:15:56.185 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:15:56.185 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:15:56.186 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:15:56.186 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:15:56.188 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

The usage of the tester.mirrors dictionary allows specifying the +desired topic/broker combination for sending the test messages, +especially when working with multiple Kafka clusters. This ensures that +the data is sent to the appropriate topic/broker based on the consuming +function, and consumed from appropriate topic/broker based on the +producing function.

Running the application

You can run your application using fastkafka run CLI command in the +same way that you would run a single cluster app.

To start your app, copy the code above in multi_cluster_example.py and +run it by running:

Now we can run the app. Copy the code above in multi_cluster_example.py, +adjust your server configurations, and run it by running

fastkafka run --num-workers=1 --kafka-broker=development multi_cluster_example:app

In your app logs, you should see your app starting up and your two +consumer functions connecting to different kafka clusters.

[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24092'}
[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[182747]: 23-06-23 12:16:14.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': '127.0.0.1:24093'}
[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})
[182747]: 23-06-23 12:16:14.131 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}
[182747]: 23-06-23 12:16:14.131 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'preprocessed_signals'})
[182747]: 23-06-23 12:16:14.136 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'preprocessed_signals'}
[182747]: 23-06-23 12:16:14.136 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}.
[182747]: 23-06-23 12:16:14.141 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'preprocessed_signals': 1}.
Starting process cleanup, this may take a few seconds...
23-06-23 12:16:18.294 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Terminating the process 182747...
[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[182747]: 23-06-23 12:16:19.380 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:19.471 [INFO] fastkafka._components._subprocess: terminate_asyncio_process(): Process 182747 terminated.

Application documentation

At the moment, the documentation for the multi-cluster app is not yet implemented, but it is under development and you can expect it soon!

Examples on how to use multiple broker configurations

Example #1

In this section, we’ll explore how you can effectively forward topics +between different Kafka clusters, enabling seamless data synchronization +for your applications.

Imagine having two Kafka clusters, namely kafka_brokers_1 and +kafka_brokers_2, each hosting its own set of topics and messages. Now, +if you want to forward a specific topic (in this case: +preprocessed_signals) from kafka_brokers_1 to kafka_brokers_2, +FastKafka provides an elegant solution.

Let’s examine the code snippet that configures our application for topic +forwarding:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_original(msg: TestMsg):
await to_preprocessed_signals_forward(msg)


@app.produces(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def to_preprocessed_signals_forward(data: TestMsg) -> TestMsg:
return data

Here’s how it works: our FastKafka application is configured to consume +messages from kafka_brokers_1 and process them in the +on_preprocessed_signals_original function. We want to forward these +messages to kafka_brokers_2. To achieve this, we define the +to_preprocessed_signals_forward function as a producer, seamlessly +producing the processed messages to the preprocessed_signals topic +within the kafka_brokers_2 cluster.

Testing

To test our FastKafka forwarding application, we can use our testing +framework. Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.mirrors[app.on_preprocessed_signals_original](TestMsg(msg="signal"))
await tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5)
23-06-23 12:16:31.689 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-06-23 12:16:31.690 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-06-23 12:16:31.691 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-06-23 12:16:31.691 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:31.701 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:31.702 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:31.702 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:31.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:31.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:31.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:31.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:31.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:31.707 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-06-23 12:16:31.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:31.708 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:31.708 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:31.709 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:31.709 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:35.703 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:35.703 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:35.704 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:35.704 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:35.705 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:35.705 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:35.706 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:35.707 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

With the help of the Tester object, we can simulate and verify the +behavior of our FastKafka application. Here’s how it works:

  1. We create an instance of the Tester by passing in our app +object, which represents our FastKafka application.

  2. Using the tester.mirrors dictionary, we can send a message to a +specific Kafka broker and topic combination. In this case, we use +tester.mirrors[app.on_preprocessed_signals_original] to send a +TestMsg message with the content “signal" to the appropriate Kafka +broker and topic.

  3. After sending the message, we can perform assertions on the mirrored +function using +tester.mirrors[app.to_preprocessed_signals_forward].assert_called(timeout=5). +This assertion ensures that the mirrored function has been called +within a specified timeout period (in this case, 5 seconds).

Example #2

In this section, we’ll explore how you can effortlessly consume data +from multiple sources, process it, and aggregate the results into a +single topic on a specific cluster.

Imagine you have two Kafka clusters: kafka_brokers_1 and +kafka_brokers_2, each hosting its own set of topics and messages. +Now, what if you want to consume data from both clusters, perform some +processing, and produce the results to a single topic on +kafka_brokers_1? FastKafka has got you covered!

Let’s take a look at the code snippet that configures our application +for aggregating multiple clusters:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals_1(msg: TestMsg):
print(f"Default: {msg=}")
await to_predictions(msg)


@app.consumes(topic="preprocessed_signals", brokers=kafka_brokers_2)
async def on_preprocessed_signals_2(msg: TestMsg):
print(f"Specified: {msg=}")
await to_predictions(msg)


@app.produces(topic="predictions")
async def to_predictions(prediction: TestMsg) -> TestMsg:
print(f"Sending prediction: {prediction}")
return [prediction]

Here’s the idea: our FastKafka application is set to consume messages +from the topic “preprocessed_signals" on kafka_brokers_1 cluster, as +well as from the same topic on kafka_brokers_2 cluster. We have two +consuming functions, on_preprocessed_signals_1 and +on_preprocessed_signals_2, that handle the messages from their +respective clusters. These functions perform any required processing, in +this case, just calling the to_predictions function.

The exciting part is that the to_predictions function acts as a +producer, sending the processed results to the “predictions" topic on +kafka_brokers_1 cluster. By doing so, we effectively aggregate the +data from multiple sources into a single topic on a specific cluster.

This approach enables you to consume data from multiple Kafka clusters, +process it, and produce the aggregated results to a designated topic. +Whether you’re generating predictions, performing aggregations, or any +other form of data processing, FastKafka empowers you to harness the +full potential of multiple clusters.

Testing

Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.mirrors[app.on_preprocessed_signals_1](TestMsg(msg="signal"))
await tester.mirrors[app.on_preprocessed_signals_2](TestMsg(msg="signal"))
await tester.on_predictions.assert_called(timeout=5)
23-06-23 12:16:41.222 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-06-23 12:16:41.223 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-06-23 12:16:41.224 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:41.224 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:41.239 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:41.239 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:41.240 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-06-23 12:16:41.240 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:41.241 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:41.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:41.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:41.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:41.243 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-06-23 12:16:41.245 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:41.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:41.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:41.247 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:41.248 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:41.248 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:41.249 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:16:41.249 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
Default: msg=TestMsg(msg='signal')
Sending prediction: msg='signal'
Specified: msg=TestMsg(msg='signal')
Sending prediction: msg='signal'
23-06-23 12:16:45.241 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:45.242 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:45.242 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:45.243 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:45.244 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:45.245 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:45.245 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:45.246 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:45.246 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:45.247 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Here’s how the code above works:

  1. Within an async with block, create an instance of the Tester by +passing in your app object, representing your FastKafka application.

  2. Using the tester.mirrors dictionary, you can send messages to +specific Kafka broker and topic combinations. In this case, we use +tester.mirrors[app.on_preprocessed_signals_1] and +tester.mirrors[app.on_preprocessed_signals_2] to send TestMsg +messages with the content “signal" to the corresponding Kafka broker +and topic combinations.

  3. After sending the messages, you can perform assertions on the +on_predictions function using +tester.on_predictions.assert_called(timeout=5). This assertion +ensures that the on_predictions function has been called within a +specified timeout period (in this case, 5 seconds).

Example #3

In some scenarios, you may need to produce messages to multiple Kafka +clusters simultaneously. FastKafka simplifies this process by allowing +you to configure your application to produce messages to multiple +clusters effortlessly. Let’s explore how you can achieve this:

Consider the following code snippet that demonstrates producing messages +to multiple clusters:

from pydantic import BaseModel, Field

from fastkafka import FastKafka

class TestMsg(BaseModel):
msg: str = Field(...)

kafka_brokers_1 = dict(localhost=dict(url="server_1", port=9092))
kafka_brokers_2 = dict(localhost=dict(url="server_2", port=9092))

app = FastKafka(kafka_brokers=kafka_brokers_1)


@app.consumes(topic="preprocessed_signals")
async def on_preprocessed_signals(msg: TestMsg):
print(f"{msg=}")
await to_predictions_1(TestMsg(msg="prediction"))
await to_predictions_2(TestMsg(msg="prediction"))


@app.produces(topic="predictions")
async def to_predictions_1(prediction: TestMsg) -> TestMsg:
print(f"Sending prediction to s1: {prediction}")
return [prediction]


@app.produces(topic="predictions", brokers=kafka_brokers_2)
async def to_predictions_2(prediction: TestMsg) -> TestMsg:
print(f"Sending prediction to s2: {prediction}")
return [prediction]

Here’s what you need to know about producing to multiple clusters:

  1. We define two Kafka broker configurations: kafka_brokers_1 and +kafka_brokers_2, representing different clusters with their +respective connection details.

  2. We create an instance of the FastKafka application, specifying +kafka_brokers_1 as the primary cluster for producing messages.

  3. The on_preprocessed_signals function serves as a consumer, +handling incoming messages from the “preprocessed_signals" topic. +Within this function, we invoke two producer functions: +to_predictions_1 and to_predictions_2.

  4. The to_predictions_1 function sends predictions to the +“predictions" topic on kafka_brokers_1 cluster.

  5. Additionally, the to_predictions_2 function sends the same +predictions to the “predictions" topic on kafka_brokers_2 cluster. +This allows for producing the same data to multiple clusters +simultaneously.

By utilizing this approach, you can seamlessly produce messages to +multiple Kafka clusters, enabling you to distribute data across +different environments or leverage the strengths of various clusters.

Feel free to customize the producer functions as per your requirements, +performing any necessary data transformations or enrichment before +sending the predictions.

With FastKafka, producing to multiple clusters becomes a breeze, +empowering you to harness the capabilities of multiple environments +effortlessly.

Testing

Let’s take a look at the testing code snippet:

from fastkafka.testing import Tester

async with Tester(app) as tester:
await tester.to_preprocessed_signals(TestMsg(msg="signal"))
await tester.mirrors[to_predictions_1].assert_called(timeout=5)
await tester.mirrors[to_predictions_2].assert_called(timeout=5)
23-06-23 12:16:49.903 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-06-23 12:16:49.904 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-06-23 12:16:49.904 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:49.905 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:49.905 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_2:9092'}'
23-06-23 12:16:49.906 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:49.921 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'server_1:9092'}'
23-06-23 12:16:49.921 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-06-23 12:16:49.921 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:49.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:49.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:49.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:49.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:49.924 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['preprocessed_signals']
23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:49.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:49.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_1:9092'}
23-06-23 12:16:49.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:49.926 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:49.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-06-23 12:16:49.927 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'server_2:9092'}
23-06-23 12:16:49.928 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-06-23 12:16:49.928 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-06-23 12:16:49.929 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['predictions']
23-06-23 12:16:49.929 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
msg=TestMsg(msg='signal')
Sending prediction to s1: msg='prediction'
Sending prediction to s2: msg='prediction'
23-06-23 12:16:53.922 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:53.922 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:53.923 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:53.923 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:53.924 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:53.924 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:53.925 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-06-23 12:16:53.925 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-06-23 12:16:53.926 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Here’s how you can perform the necessary tests:

  1. Within an async with block, create an instance of the Tester by +passing in your app object, representing your FastKafka application.

  2. Using the tester.to_preprocessed_signals method, you can send a +TestMsg message with the content “signal".

  3. After sending the message, you can perform assertions on the +to_predictions_1 and to_predictions_2 functions using +tester.mirrors[to_predictions_1].assert_called(timeout=5) and +tester.mirrors[to_predictions_2].assert_called(timeout=5). These +assertions ensure that the respective producer functions have +produced data to their respective topic/broker combinations.

By employing this testing approach, you can verify that the producing +functions correctly send messages to their respective clusters. The +testing framework provided by FastKafka enables you to ensure the +accuracy and reliability of your application’s producing logic.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html b/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html new file mode 100644 index 0000000..d9e4223 --- /dev/null +++ b/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka/index.html @@ -0,0 +1,73 @@ + + + + + +Deploying FastKafka using Docker | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Deploying FastKafka using Docker

Building a Docker Image

To build a Docker image for a FastKafka project, we need the following +items:

  1. A library that is built using FastKafka.
  2. A file in which the requirements are specified. This could be a +requirements.txt file, a setup.py file, or even a wheel file.
  3. A Dockerfile to build an image that will include the two files +mentioned above.

Creating FastKafka Code

Let’s create a +FastKafka-based +application and write it to the application.py file based on the +tutorial.

# content of the "application.py" file

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()


from pydantic import BaseModel, NonNegativeFloat, Field

class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

Creating requirements.txt file

The above code only requires FastKafka. So, we will add only that to the +requirements.txt file, but you can add additional requirements to it +as well.

fastkafka>=0.3.0

Here we are using requirements.txt to store the project’s +dependencies. However, other methods like setup.py, pipenv, and +wheel files can also be used. setup.py is commonly used for +packaging and distributing Python modules, while pipenv is a tool used +for managing virtual environments and package dependencies. wheel +files are built distributions of Python packages that can be installed +with pip.

Creating Dockerfile

# (1)
FROM python:3.9-slim-bullseye
# (2)
WORKDIR /project
# (3)
COPY application.py requirements.txt /project/
# (4)
RUN pip install --no-cache-dir --upgrade -r /project/requirements.txt
# (5)
CMD ["fastkafka", "run", "--num-workers", "2", "--kafka-broker", "production", "application:kafka_app"]
  1. Start from the official Python base image.

  2. Set the current working directory to /project.

    This is where we’ll put the requirements.txt file and the +application.py file.

  3. Copy the application.py file and requirements.txt file inside +the /project directory.

  4. Install the package dependencies in the requirements file.

    The --no-cache-dir option tells pip to not save the downloaded +packages locally, as that is only if pip was going to be run again +to install the same packages, but that’s not the case when working +with containers.

    The --upgrade option tells pip to upgrade the packages if they +are already installed.

  5. Set the command to run the fastkafka run command.

    CMD takes a list of strings, each of these strings is what you +would type in the command line separated by spaces.

    This command will be run from the current working directory, the +same /project directory you set above with WORKDIR /project.

    We supply additional parameters --num-workers and --kafka-broker +for the run command. Finally, we specify the location of our +FastKafka application as a command argument.

    To learn more about fastkafka run command please check the CLI +docs.

Build the Docker Image

Now that all the files are in place, let’s build the container image.

  1. Go to the project directory (where your Dockerfile is, containing +your application.py file).

  2. Run the following command to build the image:

    docker build -t fastkafka_project_image .

    This command will create a docker image with the name +fastkafka_project_image and the latest tag.

That’s it! You have now built a docker image for your FastKafka project.

Start the Docker Container

Run a container based on the built image:

docker run -d --name fastkafka_project_container fastkafka_project_image

Additional Security

Trivy is an open-source tool that scans Docker images for +vulnerabilities. It can be integrated into your CI/CD pipeline to ensure +that your images are secure and free from known vulnerabilities. Here’s +how you can use trivy to scan your fastkafka_project_image:

  1. Install trivy on your local machine by following the instructions +provided in the official trivy +documentation.

  2. Run the following command to scan your fastkafka_project_image:

    trivy image fastkafka_project_image

    This command will scan your fastkafka_project_image for any +vulnerabilities and provide you with a report of its findings.

  3. Fix any vulnerabilities identified by trivy. You can do this by +updating the vulnerable package to a more secure version or by using +a different package altogether.

  4. Rebuild your fastkafka_project_image and repeat steps 2 and 3 +until trivy reports no vulnerabilities.

By using trivy to scan your Docker images, you can ensure that your +containers are secure and free from known vulnerabilities.

Example repo

A FastKafka-based library which uses the above-mentioned Dockerfile to build a Docker image can be found here.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html b/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html new file mode 100644 index 0000000..b13c689 --- /dev/null +++ b/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka/index.html @@ -0,0 +1,143 @@ + + + + + +Using Redpanda to test FastKafka | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Using Redpanda to test FastKafka

What is FastKafka?

FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.

What is Redpanda?

Redpanda is a drop-in replacement for Kafka. Most of the Kafka tools +work out of the box with Redpanda.

From redpanda.com:

Redpanda is a Kafka®-compatible streaming data platform that is proven +to be 10x faster and 6x lower in total costs. It is also JVM-free, +ZooKeeper®-free, Jepsen-tested and source available.

Some of the advantages of Redpanda over Kafka are

  1. A single binary with built-in everything, no ZooKeeper® or JVM +needed.
  2. Costs up to 6X less than Kafka.
  3. Up to 10x lower average latencies and up to 6x faster Kafka +transactions without compromising correctness.

To learn more about Redpanda, please visit their +website or checkout this blog +post +comparing Redpanda and Kafka’s performance benchmarks.

Example repo

A sample FastKafka-based library that uses Redpanda for testing, based +on this guide, can be found +here.

The process

Here are the steps we’ll be walking through to build our example:

  1. Set up the prerequisites.
  2. Clone the example repo.
  3. Explain how to write an application using FastKafka.
  4. Explain how to write a test case to test FastKafka with Redpanda.
  5. Run the test case and produce/consume messages.

1. Prerequisites

Before starting, make sure you have the following prerequisites set up:

  1. Python 3.x: A Python 3.x installation is required to run +FastKafka. You can download the latest version of Python from the +official website. You’ll also +need to have pip installed and updated, which is Python’s package +installer.
  2. Docker Desktop: Docker is used to run Redpanda, which is +required for testing FastKafka. You can download and install Docker +Desktop from the official +website.
  3. Git: You’ll need to have Git installed to clone the example +repo. You can download Git from the official +website.

2. Cloning and setting up the example repo

To get started with the example code, clone the GitHub +repository by +running the following command in your terminal:

git clone https://github.com/airtai/sample_fastkafka_with_redpanda.git
cd sample_fastkafka_with_redpanda

This will create a new directory called sample_fastkafka_with_redpanda +and download all the necessary files.

Create a virtual environment

Before writing any code, let’s create a new virtual +environment +for our project.

A virtual environment is an isolated environment for a Python project, +which allows you to manage project-specific dependencies and avoid +conflicts between different projects.

To create a new virtual environment, run the following commands in your +terminal:

python3 -m venv venv

This will create a new directory called venv in your project +directory, which will contain the virtual environment.

To activate the virtual environment, run the following command:

source venv/bin/activate

This will change your shell’s prompt to indicate that you are now +working inside the virtual environment.

Finally, run the following command to upgrade pip, the Python package +installer:

pip install --upgrade pip

Install Python dependencies

Next, let’s install the required Python dependencies. In this guide, +we’ll be using +FastKafka +to write our application code and pytest and pytest-asyncio to test +it.

You can install the dependencies from the requirements.txt file +provided in the cloned repository by running:

pip install -r requirements.txt

This will install all the required packages and their dependencies.

3. Writing server code

The application.py file in the cloned repository demonstrates how to +use FastKafka to consume messages from a Kafka topic, make predictions +using a predictive model, and publish the predictions to another Kafka +topic. Here is an explanation of the code:

Preparing the demo model

First we will prepare our model using the Iris dataset so that we can +demonstrate the predictions using FastKafka. The following call +downloads the dataset and trains the model.

We will wrap the model creation into a lifespan of our app so that the +model is created just before the app is started.

from contextlib import asynccontextmanager

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from fastkafka import FastKafka

ml_models = {}


@asynccontextmanager
async def lifespan(app: FastKafka):
# Load the ML model
X, y = load_iris(return_X_y=True)
ml_models["iris_predictor"] = LogisticRegression(random_state=0, max_iter=500).fit(
X, y
)
yield
# Clean up the ML models and release the resources
ml_models.clear()

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines two message classes for use in a FastKafka +application:

  • The IrisInputData class is used to represent input data for a +predictive model. It has four fields of type +NonNegativeFloat, +which is a subclass of float that only allows non-negative floating +point values.

  • The IrisPrediction class is used to represent the output of the +predictive model. It has a single field species of type string +representing the predicted species.

These message classes will be used to parse and validate incoming data +in Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class IrisInputData(BaseModel):
sepal_length: NonNegativeFloat = Field(
..., example=0.5, description="Sepal length in cm"
)
sepal_width: NonNegativeFloat = Field(
..., example=0.5, description="Sepal width in cm"
)
petal_length: NonNegativeFloat = Field(
..., example=0.5, description="Petal length in cm"
)
petal_width: NonNegativeFloat = Field(
..., example=0.5, description="Petal width in cm"
)


class IrisPrediction(BaseModel):
species: str = Field(..., example="setosa", description="Predicted species")

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which contains two entries: "localhost" and "production", specifying local development and production Kafka brokers. Each entry specifies the URL, port, and other details of a Kafka broker. This dictionary is used both to generate documentation and to later run the server against one of the given Kafka brokers.

Next, an instance of the +FastKafka +class is initialized with the minimum required arguments:

  • kafka_brokers: a dictionary used for generating documentation
from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Iris predictions",
kafka_brokers=kafka_brokers,
lifespan=lifespan,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON encode messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the IrisInputData message class. Specifying the +type of the single argument is instructing the Pydantic to use +IrisInputData.parse_raw() on the consumed message before passing it +to the user defined function on_input_data.

  • The @produces decorator is applied to the to_predictions function, which specifies that this function should produce a message to the “predictions” Kafka topic whenever it is called. The to_predictions function takes a single integer argument species_class representing one of three possible string values predicted by the model. It creates a new IrisPrediction message using this value and then returns it. The framework will call the IrisPrediction.json().encode("utf-8") function on the returned value and produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: IrisInputData):
species_class = ml_models["iris_predictor"].predict(
[[msg.sepal_length, msg.sepal_width, msg.petal_length, msg.petal_width]]
)[0]

await to_predictions(species_class)


@kafka_app.produces(topic="predictions")
async def to_predictions(species_class: int) -> IrisPrediction:
iris_species = ["setosa", "versicolor", "virginica"]

prediction = IrisPrediction(species=iris_species[species_class])
return prediction

4. Writing the test code

The service can be tested using the +Tester +instance which can be configured to start a Redpanda +broker for testing +purposes. The test.py file in the cloned repository contains the +following code for testing.

import pytest
from application import IrisInputData, IrisPrediction, kafka_app

from fastkafka.testing import Tester

msg = IrisInputData(
sepal_length=0.1,
sepal_width=0.2,
petal_length=0.3,
petal_width=0.4,
)


@pytest.mark.asyncio
async def test():
# Start Tester app and create local Redpanda broker for testing
async with Tester(kafka_app).using_local_redpanda(
tag="v23.1.2", listener_port=9092
) as tester:
# Send IrisInputData message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with IrisPrediction in predictions topic
await tester.awaited_mocks.on_predictions.assert_awaited_with(
IrisPrediction(species="setosa"), timeout=2
)

The +Tester +module utilizes +LocalRedpandaBroker +to start and stop a Redpanda broker for testing purposes using Docker.

5. Running the tests

We can run the tests in the test.py file by executing the +following command:

pytest test.py

This will start a Redpanda broker using Docker and execute the tests. The +output of the command is:

(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$ pytest
============================== test session starts ===============================
platform linux -- Python 3.10.6, pytest-7.2.2, pluggy-1.0.0
rootdir: /home/kumaran/dev/sample_fastkafka_with_redpanda, configfile: pytest.ini, testpaths: test.py
plugins: asyncio-0.21.0, anyio-3.6.2
asyncio: mode=strict
collected 1 item

test.py . [100%]

=============================== 1 passed in 7.28s ================================
(venv) fastkafka@airt-ai:~/dev/sample_fastkafka_with_redpanda$

Running the tests with the Redpanda broker ensures that your code is +working correctly with a real Kafka-like message broker, making your +tests more reliable.

Recap

We have created an Iris classification model and encapsulated it into our +FastKafka +application. The app will consume the IrisInputData from the +input_data topic and produce the predictions to predictions topic.

To test the app we have:

  1. Created the app

  2. Started our +Tester +class with Redpanda broker which mirrors the developed app topics +for testing purposes

  3. Sent IrisInputData message to input_data topic

  4. Asserted and checked that the developed iris classification service +has reacted to IrisInputData message

+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html b/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html new file mode 100644 index 0000000..3005ca9 --- /dev/null +++ b/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/index.html @@ -0,0 +1,78 @@ + + + + + +Using FastAPI to Run FastKafka Application | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Using FastAPI to Run FastKafka Application

When deploying a FastKafka application, the default approach is to +utilize the fastkafka run CLI +command. This command allows you to launch your FastKafka application as +a standalone service. However, if you already have a FastAPI application +in place and wish to run FastKafka application alongside it, you have an +alternative option.

FastKafka provides a method called +FastKafka.fastapi_lifespan +that leverages FastAPI’s +lifespan +feature. This method allows you to run your FastKafka application +together with your existing FastAPI app, seamlessly integrating their +functionalities. By using the +FastKafka.fastapi_lifespan +method, you can start the FastKafka application within the same process +as the FastAPI app.

The +FastKafka.fastapi_lifespan +method ensures that both FastAPI and FastKafka are initialized and start +working simultaneously. This approach enables the execution of +Kafka-related tasks, such as producing and consuming messages, while +also handling HTTP requests through FastAPI’s routes.

By combining FastAPI and FastKafka in this manner, you can build a +comprehensive application that harnesses the power of both frameworks. +Whether you require real-time messaging capabilities or traditional HTTP +endpoints, this approach allows you to leverage the strengths of FastAPI +and FastKafka within a single deployment setup.

Prerequisites

  1. A basic knowledge of +FastKafka +is needed to proceed with this guide. If you are not familiar with +FastKafka, +please go through the tutorial first.
  2. FastKafka +and FastAPI libraries needs to be installed.

This guide will provide a step-by-step explanation, taking you through +each stage individually, before combining all the components in the +final section for a comprehensive understanding of the process.

1. Basic FastKafka app

In this step, we will begin by creating a simple FastKafka application.

from pydantic import BaseModel, Field, NonNegativeFloat
from typing import *

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Greetings",
kafka_brokers=kafka_brokers,
)


class TestMsg(BaseModel):
msg: str = Field(...)


@kafka_app.consumes()
async def on_names(msg: TestMsg):
await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))


@kafka_app.produces()
async def to_greetings(greeting: TestMsg) -> TestMsg:
return greeting

In the above example, we consume messages from a topic called names, +we prepend “Hello" to the message, and send it back to another topic +called greetings.

We now have a simple +FastKafka +app to produce and consume from two topics.

2. Using fastapi_lifespan method

In this step of the guide, we will explore the integration of a +FastKafka application with a FastAPI application using the +FastKafka.fastapi_lifespan +method. The +FastKafka.fastapi_lifespan +method is a feature provided by FastKafka, which allows you to +seamlessly integrate a FastKafka application with a FastAPI application +by leveraging FastAPI’s lifespan feature.

from fastapi import FastAPI

fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan(kafka_broker_name="localhost"))


@fastapi_app.get("/hello")
async def hello():
return {"msg": "hello there"}

In the above example, a new instance of the FastAPI app is created, +and when the app is started using uvicorn, it also runs the +FastKafka +application concurrently.

Putting it all together

Let’s put the above code together and write it in a file called +fast_apps.py.

# content of the "fast_apps.py" file

from pydantic import BaseModel, Field, NonNegativeFloat
from typing import *

from fastkafka import FastKafka

kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

kafka_app = FastKafka(
title="Greetings",
kafka_brokers=kafka_brokers,
)


class TestMsg(BaseModel):
msg: str = Field(...)


@kafka_app.consumes()
async def on_names(msg: TestMsg):
await to_greetings(TestMsg(msg=f"Hello {msg.msg}"))


@kafka_app.produces()
async def to_greetings(greeting: TestMsg) -> TestMsg:
return greeting


from fastapi import FastAPI

fastapi_app = FastAPI(lifespan=kafka_app.fastapi_lifespan("localhost"))

@fastapi_app.get("/hello")
async def hello():
return {"msg": "hello there"}

Finally, you can run the FastAPI application using a web server of your +choice, such as Uvicorn or Hypercorn by running the below command:

uvicorn fast_apps:fastapi_app --host=0.0.0.0 --port=8080
+ + + + \ No newline at end of file diff --git a/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka/index.html b/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka/index.html new file mode 100644 index 0000000..d0dee7b --- /dev/null +++ b/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka/index.html @@ -0,0 +1,72 @@ + + + + + +Using Tester to test FastKafka | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

Using Tester to test FastKafka

In order to speed up development and make testing easier, we have +implemented the Tester class.

The Tester instance starts an +in-memory implementation of a Kafka broker i.e. there is no need for +starting a localhost Kafka service for testing FastKafka apps. The +Tester will redirect consumes +and produces decorated functions to the in-memory Kafka broker so that +you can quickly test FastKafka apps without the need of a running Kafka +broker and all its dependencies. Also, for each FastKafka consumes and +produces function, Tester will +create its mirrored function i.e. if the consumes function is +implemented, the Tester will +create the produces function (and the other way - if the produces +function is implemented, Tester +will create the consumes function).

Basic example

To showcase the functionalities of FastKafka and illustrate the concepts +discussed, we can use a simple test message called TestMsg. Here’s the +definition of the TestMsg class:

class TestMsg(BaseModel):
msg: str = Field(...)


test_msg = TestMsg(msg="signal")

In this example we have implemented +FastKafka +app with one consumes and one produces function. on_input function +consumes messages from the input topic and to_output function +produces messages to the output topic.

Note: it is necessary to define parameter and return types in the +produces and consumes functions

from pydantic import BaseModel, Field

app = FastKafka()


@app.consumes()
async def on_input(msg: TestMsg):
await to_output(TestMsg(msg=f"Hello {msg.msg}"))


@app.produces()
async def to_output(msg: TestMsg) -> TestMsg:
return msg

Testing the application

In this example app has implemented on_input and to_output +functions. We can now use Tester +to create their mirrored functions: to_input and on_output.

Testing process for this example could look like this:

  1. tester produces the message to the input topic

  2. Assert that the app consumed the message by calling on_input +with the accurate argument

  3. Within on_input function, to_output function is called - and +message is produced to the output topic

  4. Assert that the tester consumed the message by calling on_output +with the accurate argument

async with Tester(app).using_inmemory_broker() as tester:
input_msg = TestMsg(msg="Mickey")

# tester produces message to the input topic
await tester.to_input(input_msg)
# previous line is equal to
# await tester.mirrors[app.on_input](input_msg)

# assert that app consumed from the input topic and it was called with the accurate argument
await app.awaited_mocks.on_input.assert_called_with(
TestMsg(msg="Mickey"), timeout=5
)
# assert that tester consumed from the output topic and it was called with the accurate argument
await tester.on_output.assert_called_with(TestMsg(msg="Hello Mickey"), timeout=5)
print("ok")
23-07-31 10:38:30.810 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-07-31 10:38:30.811 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-07-31 10:38:30.812 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
23-07-31 10:38:30.812 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:30.826 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
23-07-31 10:38:30.827 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:30.827 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:30.828 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}
23-07-31 10:38:30.828 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:30.829 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:30.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:30.830 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input']
23-07-31 10:38:30.830 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:30.835 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:30.835 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}
23-07-31 10:38:30.836 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:30.836 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:30.836 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:30.837 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output']
23-07-31 10:38:30.837 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:34.828 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:34.828 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:34.829 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:34.829 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:34.830 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:34.831 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:34.831 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:34.832 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:34.832 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping
ok

Final script

import asyncio
from fastkafka._application.app import FastKafka
from fastkafka._application.tester import Tester
from pydantic import BaseModel, Field


class TestMsg(BaseModel):
msg: str = Field(...)


app = FastKafka()


@app.consumes()
async def on_input(msg: TestMsg):
await to_output(TestMsg(msg=f"Hello {msg.msg}"))


@app.produces()
async def to_output(msg: TestMsg) -> TestMsg:
return msg


async def async_tests():
async with Tester(app).using_inmemory_broker() as tester:
input_msg = TestMsg(msg="Mickey")

# tester produces message to the input topic
await tester.to_input(input_msg)

# assert that app consumed from the input topic and it was called with the accurate argument
await app.awaited_mocks.on_input.assert_called_with(
TestMsg(msg="Mickey"), timeout=5
)
# assert that tester consumed from the output topic and it was called with the accurate argument
await tester.awaited_mocks.on_output.assert_called_with(
TestMsg(msg="Hello Mickey"), timeout=5
)
print("ok")


if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(async_tests())
23-07-31 10:38:34.855 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-07-31 10:38:34.856 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-07-31 10:38:34.856 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
23-07-31 10:38:34.857 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:34.871 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
23-07-31 10:38:34.872 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:34.872 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:34.873 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}
23-07-31 10:38:34.874 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:34.875 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:34.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:34.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input']
23-07-31 10:38:34.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:34.878 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:34.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': 'localhost:9092'}
23-07-31 10:38:34.879 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:34.879 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:34.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:34.880 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output']
23-07-31 10:38:34.881 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:38.873 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:38.873 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:38.874 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:38.874 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:38.875 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:38.876 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:38.877 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:38.877 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:38.878 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping
ok

Using external brokers

If you already have running brokers, e.g. kafka_brokers, you can use the +Tester method +using_external_broker to set the brokers which will be used in tests.

The same example as previous but with external kafka_brokers:

# content of the "application_test.py" file

import asyncio
from fastkafka._application.app import FastKafka
from fastkafka._application.tester import Tester
from pydantic import BaseModel, Field


class TestMsg(BaseModel):
msg: str = Field(...)


kafka_brokers = {
"localhost": {
"url": "localhost",
"description": "local development kafka broker",
"port": 9092,
},
"production": {
"url": "kafka.airt.ai",
"description": "production kafka broker",
"port": 9092,
"protocol": "kafka-secure",
"security": {"type": "plain"},
},
}

app = FastKafka(
title="Demo Kafka app",
kafka_brokers=kafka_brokers,
)

@app.consumes()
async def on_input(msg: TestMsg):
await to_output(TestMsg(msg=f"Hello {msg.msg}"))


@app.produces()
async def to_output(msg: TestMsg) -> TestMsg:
return msg


async def async_tests():
async with Tester(app).using_external_broker(bootstrap_servers_id="production") as tester:
input_msg = TestMsg(msg="Mickey")

# tester produces message to the input topic
await tester.to_input(input_msg)

# assert that app consumed from the input topic and it was called with the accurate argument
await app.awaited_mocks.on_input.assert_called_with(
TestMsg(msg="Mickey"), timeout=5
)
# assert that tester consumed from the output topic and it was called with the accurate argument
await tester.awaited_mocks.on_output.assert_called_with(
TestMsg(msg="Hello Mickey"), timeout=5
)
print("ok")


if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(async_tests())

Example: New Employee app

In this example, our app has one consumes and two produces functions.

Every time a company hires an Employee, some employee data is sent to +the new_employee topic.

That’s when our application comes into play! The app consumes this data +by calling on_new_employee. Within this function, to_employee_email +and to_welcome_message functions are called - and messages are +produced to the employee_email and welcome_message topics.

class Employee(BaseModel):
name: str
surname: str
email: Optional[str] = None


class EmaiMessage(BaseModel):
sender: str = "info@gmail.com"
receiver: str
subject: str
message: str

kafka_brokers = dict(localhost=[dict(url="server_1", port=9092)], production=[dict(url="production_server_1", port=9092)])
app = FastKafka(kafka_brokers=kafka_brokers)


@app.consumes()
async def on_new_employee(msg: Employee):
employee = await to_employee_email(msg)
await to_welcome_message(employee)


@app.produces()
async def to_employee_email(employee: Employee) -> Employee:
# generate new email
employee.email = employee.name + "." + employee.surname + "@gmail.com"
return employee


@app.produces()
async def to_welcome_message(employee: Employee) -> EmaiMessage:
message = f"Dear {employee.name},\nWelcome to the company"
return EmaiMessage(receiver=employee.email, subject="Welcome", message=message)

Testing the application

In this example app has implemented on_new_employee, +to_employee_email and to_welcome_message functions. We can now use +Tester to create their mirrored +functions: to_new_employee, on_employee_email and +on_welcome_message.

Testing process:

  1. tester produces message to the new_employee topic

  2. Assert that the app consumed the message from the new_employee +topic with the accurate argument

  3. Within on_new_employee function, to_employee_email and +to_welcome_message functions are called - and messages are +produced to the employee_email and welcome_message topics

  4. Assert that the tester consumed the message by calling +on_employee_email

  5. Assert that the tester consumed the message by calling +on_welcome_message

assert app._kafka_config["bootstrap_servers_id"] == "localhost"

async with Tester(app).using_inmemory_broker(bootstrap_servers_id="production") as tester:
assert app._kafka_config["bootstrap_servers_id"] == "production"
assert tester._kafka_config["bootstrap_servers_id"] == "production"

# produce the message to new_employee topic
await tester.to_new_employee(Employee(name="Mickey", surname="Mouse"))
# previous line is equal to:
# await tester.mirrors[app.on_new_employee](Employee(name="Mickey", surname="Mouse"))

# Assert app consumed the message
await app.awaited_mocks.on_new_employee.assert_called_with(
Employee(name="Mickey", surname="Mouse"), timeout=5
)

# If the previous assert is true (on_new_employee was called),
# to_employee_email and to_welcome_message were called inside on_new_employee function

# Now we can check if these two messages were consumed
await tester.awaited_mocks.on_employee_email.assert_called(timeout=5)
await tester.awaited_mocks.on_welcome_message.assert_called(timeout=5)

assert app._kafka_config["bootstrap_servers_id"] == "localhost"

print("ok")
23-07-31 10:38:40.069 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-07-31 10:38:40.070 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-07-31 10:38:40.070 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'
23-07-31 10:38:40.071 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:40.071 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'
23-07-31 10:38:40.072 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:40.091 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'
23-07-31 10:38:40.091 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:40.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:40.092 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}
23-07-31 10:38:40.093 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:40.093 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:40.094 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:40.094 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['new_employee']
23-07-31 10:38:40.095 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:40.096 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:40.097 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}
23-07-31 10:38:40.098 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:40.099 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:40.099 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:40.100 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['employee_email']
23-07-31 10:38:40.100 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:40.101 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:40.101 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}
23-07-31 10:38:40.102 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:40.103 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:40.103 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:40.103 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['welcome_message']
23-07-31 10:38:40.104 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:44.092 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:44.093 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:44.093 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:44.094 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:44.094 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:44.095 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:44.095 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:44.096 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:44.096 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:44.096 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:44.097 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:44.097 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:44.097 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping
ok

Final script

import asyncio
from fastkafka._application.app import FastKafka
from fastkafka._application.tester import Tester
from pydantic import BaseModel, Field
from typing import Optional


class Employee(BaseModel):
name: str
surname: str
email: Optional[str] = None


class EmaiMessage(BaseModel):
sender: str = "info@gmail.com"
receiver: str
subject: str
message: str


kafka_brokers = dict(localhost=[dict(url="server_1", port=9092)], production=[dict(url="production_server_1", port=9092)])
app = FastKafka(kafka_brokers=kafka_brokers)


@app.consumes()
async def on_new_employee(msg: Employee):
employee = await to_employee_email(msg)
await to_welcome_message(employee)


@app.produces()
async def to_employee_email(employee: Employee) -> Employee:
# generate new email
employee.email = employee.name + "." + employee.surname + "@gmail.com"
return employee


@app.produces()
async def to_welcome_message(employee: Employee) -> EmaiMessage:
message = f"Dear {employee.name},\nWelcome to the company"
return EmaiMessage(receiver=employee.email, subject="Welcome", message=message)


async def async_tests():
    """End-to-end test: run the app against the in-memory broker and assert the flow."""
    assert app._kafka_config["bootstrap_servers_id"] == "localhost"

    async with Tester(app).using_inmemory_broker(bootstrap_servers_id="production") as tester:
        assert app._kafka_config["bootstrap_servers_id"] == "production"
        assert tester._kafka_config["bootstrap_servers_id"] == "production"

        # produce the message to new_employee topic
        await tester.to_new_employee(Employee(name="Mickey", surname="Mouse"))
        # previous line is equal to:
        # await tester.mirrors[app.on_new_employee](Employee(name="Mickey", surname="Mouse"))

        # Assert app consumed the message
        await app.awaited_mocks.on_new_employee.assert_called_with(
            Employee(name="Mickey", surname="Mouse"), timeout=5
        )

        # If the previous assert is true (on_new_employee was called),
        # to_employee_email and to_welcome_message were called inside on_new_employee function

        # Now we can check if these two messages were consumed
        await tester.awaited_mocks.on_employee_email.assert_called(timeout=5)
        await tester.awaited_mocks.on_welcome_message.assert_called(timeout=5)

    # Leaving the Tester context restores the original broker selection.
    assert app._kafka_config["bootstrap_servers_id"] == "localhost"
    print("ok")


if __name__ == "__main__":
    # asyncio.run() creates and closes the event loop for us;
    # asyncio.get_event_loop() is deprecated for this use since Python 3.10.
    asyncio.run(async_tests())
23-07-31 10:38:47.045 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
23-07-31 10:38:47.046 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
23-07-31 10:38:47.046 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'
23-07-31 10:38:47.047 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:47.048 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'
23-07-31 10:38:47.048 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:47.067 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': ['production_server_1:9092']}'
23-07-31 10:38:47.067 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
23-07-31 10:38:47.068 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:47.070 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}
23-07-31 10:38:47.070 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:47.071 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:47.071 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:47.072 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['new_employee']
23-07-31 10:38:47.072 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:47.072 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:47.073 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}
23-07-31 10:38:47.074 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:47.074 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:47.074 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:47.075 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['employee_email']
23-07-31 10:38:47.075 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:47.076 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
23-07-31 10:38:47.076 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'auto_offset_reset': 'earliest', 'max_poll_records': 100, 'bootstrap_servers': ['production_server_1:9092']}
23-07-31 10:38:47.076 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
23-07-31 10:38:47.077 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
23-07-31 10:38:47.077 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
23-07-31 10:38:47.078 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['welcome_message']
23-07-31 10:38:47.078 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
23-07-31 10:38:51.068 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:51.069 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:51.069 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:51.070 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:51.070 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:51.071 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:51.071 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:51.072 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
23-07-31 10:38:51.072 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
23-07-31 10:38:51.073 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-07-31 10:38:51.073 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:51.074 [INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
23-07-31 10:38:51.074 [INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping
ok
+ + + + \ No newline at end of file diff --git a/docs/next/index.html b/docs/next/index.html new file mode 100644 index 0000000..7e239ea --- /dev/null +++ b/docs/next/index.html @@ -0,0 +1,121 @@ + + + + + +FastKafka | FastKafka + + + + + + + + + + +
+
Version: dev 🚧

FastKafka

Effortless Kafka integration for your web services

PyPI PyPI -
+Downloads PyPI - Python
+Version

GitHub Workflow
+Status +CodeQL +Dependency
+Review

GitHub


FastKafka is a powerful and easy-to-use +Python library for building asynchronous services that interact with +Kafka topics. Built on top of Pydantic, +AIOKafka and +AsyncAPI, FastKafka simplifies the process +of writing producers and consumers for Kafka topics, handling all the +parsing, networking, task scheduling and data generation automatically. +With FastKafka, you can quickly prototype and develop high-performance +Kafka-based services with minimal code, making it an ideal choice for +developers looking to streamline their workflow and accelerate their +projects.


⭐⭐⭐ Stay in touch ⭐⭐⭐

Please show your support and stay in touch by:

Your support helps us to stay in touch with you and encourages us to +continue developing and improving the library. Thank you for your +support!


🐝🐝🐝 We were busy lately 🐝🐝🐝

Activity

Install

FastKafka works on Windows, macOS, Linux, and most Unix-style operating +systems. You can install base version of FastKafka with pip as usual:

pip install fastkafka

To install FastKafka with testing features please use:

pip install fastkafka[test]

To install FastKafka with asyncapi docs please use:

pip install fastkafka[docs]

To install FastKafka with all the features please use:

pip install fastkafka[test,docs]

Tutorial

You can start an interactive tutorial in Google Colab by clicking the +button below:

Open in Colab

Writing server code

To demonstrate FastKafka simplicity of using @produces and @consumes +decorators, we will focus on a simple app.

The app will consume JSON messages containing positive floats from one +topic, log them, and then produce incremented values to another topic.

Messages

FastKafka uses Pydantic to parse input +JSON-encoded data into Python objects, making it easy to work with +structured data in your Kafka-based applications. Pydantic’s +BaseModel class allows you +to define messages using a declarative syntax, making it easy to specify +the fields and types of your messages.

This example defines one Data message class. This class will model the +consumed and produced data in our app demo, it contains one +NonNegativeFloat field data that will be logged and “processed” +before being produced to another topic.

This message class will be used to parse and validate incoming data in +Kafka consumers and producers.

from pydantic import BaseModel, Field, NonNegativeFloat


class Data(BaseModel):
    """Message model carrying a single non-negative float payload."""

    data: NonNegativeFloat = Field(
        ..., example=0.5, description="Float data example"
    )

Application

This example shows how to initialize a FastKafka application.

It starts by defining a dictionary called kafka_brokers, which +contains two entries: "localhost" and "production", specifying local +development and production Kafka brokers. Each entry specifies the URL, +port, and other details of a Kafka broker. This dictionary is used for +both generating the documentation and later to run the actual server +against one of the given kafka brokers.

Next, an object of the +FastKafka +class is initialized with the minimum set of arguments:

  • kafka_brokers: a dictionary used for generation of documentation

We will also import and create a logger so that we can log the incoming +data in our consuming function.

from logging import getLogger
from fastkafka import FastKafka

logger = getLogger("Demo Kafka app")

# Broker entries are used both for generating the AsyncAPI documentation and
# for selecting the actual bootstrap servers when the app is run.
kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Demo Kafka app",
    kafka_brokers=kafka_brokers,
)

Function decorators

FastKafka provides convenient function decorators @kafka_app.consumes +and @kafka_app.produces to allow you to delegate the actual process of

  • consuming and producing data to Kafka, and

  • decoding and encoding JSON messages

from user defined functions to the framework. The FastKafka framework +delegates these jobs to AIOKafka and Pydantic libraries.

These decorators make it easy to specify the processing logic for your +Kafka consumers and producers, allowing you to focus on the core +business logic of your application without worrying about the underlying +Kafka integration.

This following example shows how to use the @kafka_app.consumes and +@kafka_app.produces decorators in a FastKafka application:

  • The @kafka_app.consumes decorator is applied to the on_input_data +function, which specifies that this function should be called whenever +a message is received on the “input_data" Kafka topic. The +on_input_data function takes a single argument which is expected to +be an instance of the Data message class. Specifying the type of the +single argument is instructing the Pydantic to use Data.parse_raw() +on the consumed message before passing it to the user defined function +on_input_data.

  • The @produces decorator is applied to the to_output_data function, +which specifies that this function should produce a message to the +“output_data" Kafka topic whenever it is called. The to_output_data +function takes a single float argument data. It increments the +data and returns it wrapped in a Data object. The framework will call +the Data.json().encode("utf-8") function on the returned value and +produce it to the specified topic.

@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: Data):
    """Log the consumed value and forward the incremented value to output_data."""
    logger.info(f"Got data: {msg.data}")
    await to_output_data(msg.data)


@kafka_app.produces(topic="output_data")
async def to_output_data(data: float) -> Data:
    """Wrap data + 1.0 in a Data message and produce it to the output_data topic."""
    processed_data = Data(data=data + 1.0)
    return processed_data

Testing the service

The service can be tested using the +Tester +instances which internally starts InMemory implementation of Kafka +broker.

The Tester will redirect your consumes and produces decorated functions +to the InMemory Kafka broker so that you can quickly test your app +without the need for a running Kafka broker and all its dependencies.

from fastkafka.testing import Tester

msg = Data(
data=0.1,
)

# Start Tester app and create InMemory Kafka broker for testing
async with Tester(kafka_app) as tester:
# Send Data message to input_data topic
await tester.to_input_data(msg)

# Assert that the kafka_app responded with incremented data in output_data topic
await tester.awaited_mocks.on_output_data.assert_awaited_with(
Data(data=1.1), timeout=2
)
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._start() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._patch_consumers_and_producers(): Patching consumers and producers!
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker starting
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['input_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'earliest', 'max_poll_records': 100}
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched start() called()
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched subscribe() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer.subscribe(), subscribing to: ['output_data']
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[INFO] Demo Kafka app: Got data: 0.1
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaConsumer patched stop() called
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
[INFO] fastkafka._testing.in_memory_broker: AIOKafkaProducer patched stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker._stop() called
[INFO] fastkafka._testing.in_memory_broker: InMemoryBroker stopping

Recap

We have created a simple FastKafka application. The app will consume the +Data from the input_data topic, log it and produce the incremented +data to output_data topic.

To test the app we have:

  1. Created the app

  2. Started our Tester class which mirrors the developed app topics for +testing purposes

  3. Sent Data message to input_data topic

  4. Asserted and checked that the developed service has reacted to Data +message

Running the service

The service can be started using the built-in fastkafka run CLI command. +Before we can do that, we will concatenate the code snippets from above +and save them in a file "application.py"

# content of the "application.py" file

from pydantic import BaseModel, Field, NonNegativeFloat

from fastkafka import FastKafka
from fastkafka._components.logger import get_logger

logger = get_logger(__name__)

class Data(BaseModel):
    """Message model carrying a single non-negative float payload."""

    data: NonNegativeFloat = Field(
        ..., example=0.5, description="Float data example"
    )


# Broker entries are used both for generating the AsyncAPI documentation and
# for selecting the actual bootstrap servers when the app is run.
kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Demo Kafka app",
    kafka_brokers=kafka_brokers,
)


@kafka_app.consumes(topic="input_data", auto_offset_reset="latest")
async def on_input_data(msg: Data):
    """Log the consumed value and forward the incremented value to output_data."""
    logger.info(f"Got data: {msg.data}")
    await to_output_data(msg.data)


@kafka_app.produces(topic="output_data")
async def to_output_data(data: float) -> Data:
    """Wrap data + 1.0 in a Data message and produce it to the output_data topic."""
    processed_data = Data(data=data + 1.0)
    return processed_data

To run the service, use the FastKafka CLI command and pass the module +(in this case, the file where the app implementation is located) and the +app symbol to the command.

fastkafka run --num-workers=1 --kafka-broker localhost application:kafka_app

After running the command, you should see the following output in your +command line:

[1504]: 23-05-31 11:36:45.874 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1504]: 23-05-31 11:36:45.875 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1504]: 23-05-31 11:36:45.937 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1504]: 23-05-31 11:36:45.956 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1504]: 23-05-31 11:36:45.956 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1506]: 23-05-31 11:36:45.993 [INFO] fastkafka._application.app: set_kafka_broker() : Setting bootstrap_servers value to 'localhost:9092'
[1506]: 23-05-31 11:36:45.994 [INFO] fastkafka._application.app: _create_producer() : created producer using the config: '{'bootstrap_servers': 'localhost:9092'}'
[1506]: 23-05-31 11:36:46.014 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() starting...
[1506]: 23-05-31 11:36:46.015 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer created using the following parameters: {'bootstrap_servers': 'localhost:9092', 'auto_offset_reset': 'latest', 'max_poll_records': 100}
[1506]: 23-05-31 11:36:46.040 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer started.
[1506]: 23-05-31 11:36:46.042 [INFO] aiokafka.consumer.subscription_state: Updating subscribed topics to: frozenset({'input_data'})
[1506]: 23-05-31 11:36:46.043 [INFO] aiokafka.consumer.consumer: Subscribed to topic(s): {'input_data'}
[1506]: 23-05-31 11:36:46.043 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer subscribed.
[1506]: 23-05-31 11:36:46.068 [ERROR] aiokafka.cluster: Topic input_data not found in cluster metadata
[1506]: 23-05-31 11:36:46.070 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1504]: 23-05-31 11:36:46.131 [WARNING] aiokafka.cluster: Topic input_data is not available during auto-create initialization
[1504]: 23-05-31 11:36:46.132 [INFO] aiokafka.consumer.group_coordinator: Metadata for topic has changed from {} to {'input_data': 0}.
[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)
[1506]: 23-05-31 11:37:00.237 [ERROR] aiokafka: Unable to update metadata from [0]
[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable connect to node with id 0: [Errno 111] Connect call failed ('172.28.0.12', 9092)
[1504]: 23-05-31 11:37:00.238 [ERROR] aiokafka: Unable to update metadata from [0]
[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1506]: 23-05-31 11:37:00.294 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
Starting process cleanup, this may take a few seconds...
23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1504...
23-05-31 11:37:00.345 [INFO] fastkafka._server: terminate_asyncio_process(): Terminating the process 1506...
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop(): Consumer stopped.
[1504]: 23-05-31 11:37:00.347 [INFO] fastkafka._components.aiokafka_consumer_loop: aiokafka_consumer_loop() finished.
23-05-31 11:37:00.607 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1506 was already terminated.
23-05-31 11:37:00.822 [INFO] fastkafka._server: terminate_asyncio_process(): Process 1504 was already terminated.

Documentation

The kafka app comes with builtin documentation generation using +AsyncApi HTML generator.

AsyncApi requires Node.js to be installed and we provide the following +convenience command line for it:

fastkafka docs install_deps
23-05-31 11:38:24.128 [INFO] fastkafka._components.docs_dependencies: AsyncAPI generator installed

To generate the documentation programmatically you just need to call the +following command:

fastkafka docs generate application:kafka_app
23-05-31 11:38:25.113 [INFO] fastkafka._components.asyncapi: Old async specifications at '/content/asyncapi/spec/asyncapi.yml' does not exist.
23-05-31 11:38:25.118 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'
23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
23-05-31 11:38:43.455 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /content/asyncapi/docs.

This will generate the asyncapi folder in relative path where all your +documentation will be saved. You can check out the content of it with:

ls -l asyncapi
total 8
drwxr-xr-x 4 root root 4096 May 31 11:38 docs
drwxr-xr-x 2 root root 4096 May 31 11:38 spec

In docs folder you will find the servable static html file of your +documentation. This can also be served using our fastkafka docs serve +CLI command (more on that in our guides).

In spec folder you will find a asyncapi.yml file containing the async +API specification of your application.

We can locally preview the generated documentation by running the +following command:

fastkafka docs serve application:kafka_app
23-05-31 11:38:45.250 [INFO] fastkafka._components.asyncapi: New async specifications generated at: '/content/asyncapi/spec/asyncapi.yml'
23-05-31 11:39:04.410 [INFO] fastkafka._components.asyncapi: Async docs generated at 'asyncapi/docs'
23-05-31 11:39:04.411 [INFO] fastkafka._components.asyncapi: Output of '$ npx -y -p @asyncapi/generator ag asyncapi/spec/asyncapi.yml @asyncapi/html-template -o asyncapi/docs --force-write'

Done! ✨
Check out your shiny new generated files at /content/asyncapi/docs.


Serving documentation on http://127.0.0.1:8000
127.0.0.1 - - [31/May/2023 11:39:14] "GET / HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/global.min.css HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /js/asyncapi-ui.min.js HTTP/1.1" 200 -
127.0.0.1 - - [31/May/2023 11:39:14] "GET /css/asyncapi.min.css HTTP/1.1" 200 -
Interupting serving of documentation and cleaning up...

From the parameters passed to the application constructor, we get the +documentation below:

from fastkafka import FastKafka

# Broker metadata from which the AsyncAPI documentation below is generated.
kafka_brokers = {
    "localhost": {
        "url": "localhost",
        "description": "local development kafka broker",
        "port": 9092,
    },
    "production": {
        "url": "kafka.airt.ai",
        "description": "production kafka broker",
        "port": 9092,
        "protocol": "kafka-secure",
        "security": {"type": "plain"},
    },
}

kafka_app = FastKafka(
    title="Demo Kafka app",
    kafka_brokers=kafka_brokers,
)

Kafka_servers

The following documentation snippets are for the consumer as specified in +the code above:

Kafka_consumer

The following documentation snippets are for the producer as specified in +the code above:

Kafka_producer

Finally, all messages defined as subclasses of BaseModel are +documented as well:

![Kafka_messages](https://raw.githubusercontent.com/airtai/fastkafka/main/nbs/images/screenshot-kafka-messages.png)

License

FastKafka is licensed under the Apache License 2.0

A permissive license whose main conditions require preservation of +copyright and license notices. Contributors provide an express grant of +patent rights. Licensed works, modifications, and larger works may be +distributed under different terms and without source code.

The full text of the license can be found +here.

+ + + + \ No newline at end of file diff --git a/font/Panton-SemiBold.woff b/font/Panton-SemiBold.woff new file mode 100644 index 0000000000000000000000000000000000000000..2812f88969b792f60a480935f3abdc33530dbc49 GIT binary patch literal 45248 zcmZsCV{C5S^Y&fawr$(CZQHhO+dZ{y_tbVzZM!{n|L1x0`}oevm7STnW@gRW$*g22 z+e1NI8~_CPCk+Dtgntj9a>@U+|DTA9tIGZ3mHzW#{{yajD1D`bsF*ka5c2v@|K|f} zBLpCkN+>F*007|?007lH0N^FqV{pkTp`s=X0EGU_4QvPifTrIK0j(>jFfjisIR7tB z{vQlYLtKkZ?2H@%0PvQ7yg>i}WM>HXqSM02<)1C&?jHy8Km7avKrC#%%mDznL;%26 z9smTQXykYQ+tSR)^j|*Bf1H1L0qh_(GD?>JjQ?!0|8$~%AcfEc-L$lG_57Eg@ZUa? z|D~f9c#!9`bujtIgZgI&#rzM7z)AFWMxOub0yX@LBlrhGU{!#py^)>Szn=E~#g762 zU`wOO??jFcF0KH8@4tQ_{+$Kf%xdG(v*)IXp{b!Ez*r_><@^4V-2kg2%K#vTNsr2C z5V-K;_RZ43%3J{C%!QdLSPT%bhxym-|6=qXZxKL1riOmFf8eyRu>y(51EX*N1zq6$ z|2FYI&Vf0lnV|&$h@8+IR2Uo_g0TPyfKUOL`+t6OWus_ALo-9e3*bdq*d4@druf0g z1=Mj1LyJHV#1y38PA<^S=8lGnuQMZ0N4o^!DAP8WTBw+qWg$nInwjxHRAAudfW2&( zr~m53so7Yjn_>3de7y0_=r-r|&(wAfx2N!Mwx4hIIpBnGS%NodvI=Kd1%=FawqN@Dwy$QC`0MJpl1PRvM>#4| zmc|$TS*K1le<2hp6G&9P5KFX|D1OYBT8*#tYK}R<`JZG7c$q{0y^!DG2}8FNT<(p> z<^x~hnEw?qU*VAaCrbMOv|nN&%qsp+a)Iwi?acYtQ9@LT?k!0|; z#{L%KVrPD_h~_L$Oep}**M*%c&q0@DX&h}(Iqpsu|2pBW*2`A`y~+$>PE4s5O>)o2 zb|lPDH0{vIwTt{F4#}otZT3F)s!VPzNkV|`P&;O82pVt9`VEA}OQP8~2Ke2Fi~bI7 zTeFs}V%W=e-u6H(b;*Ox8*DEVfO8T^Jeo0&DKZPRt!FEwtnuYH&gWadEM~PYaF)i4gW~!%)xzx>PSxeSpZczRmO!XUE-)6aM?azcF zgOmevz@(Asc-vk$D!9fS$iXX~Ri;t=8blDT1s&+u7Qys8XHGi!xOxPxQ~d2g56|fP zVubHktP6O|0?(kF!Lax+F1Oqb%w@q!{q`VShGD$U^?ULoMAopweO&@)zF^fKf|~%i zF#Am3(A)Ue%{AfPQM1%r@7O-3YrTEHXjr{Du3>0mflQ;Bs7vl4+j_!?=LB!*=e}H1 zxR%Y8K{lhA{9ii{T>jaYCP%+O6KgdltSFlTW=2V}P7jx&)yq2&c8fBylsBxI|#?3MFLL zd#R%s6z9V`?l$Z8_cFNH7wDJP%NL673~wj7x@yLm=~Wf_wO;wC)j?$45K3>M90|P) z0lie<)#PMsT+$zk6-S?u<&(tEG_REU|Mm)Hw>7_M7(1t5oi4ykVR>58dg;?xN5QUW zmpE%iJ~3pvj$OR@@Y^Z>oC^P*4+q*zeEkz%>bKyyYga(KwKjo2wwq572M3JHRgn@Rmi{k=MW-xWB#SPiSdXJt9h; zAM3=hT0xhwC2A{eC12F6hJ+Kj%Cn>^QEt4IgVZKLiXy{jvaBVuwmnS4Qq|wvxm*w^ z>@i8clzoWh@LTZUW3k9#IsMD!;x8~DVd9W*hU8W1vDol;ij41+DVE%KHX+osttN%| 
zKGt14;IQs~I^(Q+A0wblfTn}#Rl8Lkin@X}ts;4uFs&@TQq8&=y3)pcz}80Fym(_c z*K(FMW+o)vNKA{a7PC4o%@C}%Zk3W|Ec?&fx6bfmOC_gp>yhcKvA=Wp zKe_)Z`3^+*imGHU(sqmM? z)D_jamhkYMNg?;1$q~FK%L=@+1O~wphYi9Kr4@CZgtG)El_t$%_)buYvxGOsn($(o zG6&K25$8n78Z)b8PO|oybMG>SWK9!CY`lNrtdENX`>pHh)R)A=?=vNkm`8VIiqX9x#N^{vL+rn=0soK?n*>Fh=ESVetVyMn zOl7hQ^ecFM;@>^~Pe}i_)B)juBOrO88emXhLSRMUK;RzW8xTkkQV@O+b&z0?3XmI6 zPEbeCD$oNkXfSQCRInkiJ8*Pxad1cQ5ePyERftH40f-Aod`MZySjZvB9VkdBaVSfu zCa4!^d}wKCf9M_=TUuNbHptQg)HeVE{wq?o#xk(kAp^O!GK_*j})L)g67 z(>R1UmN;#=kht2oZFsr~iOmQ+nt zchvOM?$muWh&0MHnKYZUxU|-^eRQyNf^@}nSMANK_hA&)vwB+oq0H!n7?CT|<>Cm%0gBHsZ&GQTXpKYtVdfdHm}q=3Ib zjlhv0rl6x>ix9k!qEL%4Kv-BfQFufIAR;IdDl#dGEov*;A_gj^EtW2JCQdFMDn2Q} zC=o94Dyc2GEX6O?B#kbuD_tdhB*Q4Q8sphCQt`4fMsP3$usXm~-r~a-% zs3EFhsgbVHpmC)suIa5=rMaU8&=S^i*V@u1*EZMg)Be<9*74P8(%IAn)}_}q*NxZh z(gW8M)N|FV(A(0-)tA?g*YD7OH{dq#H0Uz;G-NbXG>kVKF(Ni{G^#NMHs&{WHtsRL zGC?=tHL*7-HkmX9H#Ia(HT`Y+Vn%IdZkB1bX%22KY#wahVS#SpZ}DVlXt``9Z?$ZV zZf$BEXq{`_W<76xVS{NSZsTlIYI9`^uoblJvE8!$w8OR&wsW_uvpcYdwwJW`wJ)`w zbbxhGb%=BraCmaWb<}i>b-Z$-cXD>RaAtK*bzXIWc8PSQbj@{Rb?b3wcGq?fcCT_@ zcK`OE^icHh^l0!n^2GF1^tAPi_AK?>@}l)p_p0)`^QQN<_0IO*^1=5J_p$Y<^Lg>5 z_BHlR_wDoj^kekX{5On8{9*kC{oVbm{1*cd14REV@j%1C_`s^b z*}%IX)F9d*!=MV!&5>RRPuw!*18H&P$L7bq%vIt~%$d1Ce+QLxa$%?`-pvj8(6Ecx*&jndN?HIMYAAY+61|419 z>E^S!TrQir#U!rHuH438PSO+?wA^}?E2wuE5IM(Xj^n$Q?O100vNe04WQ0vMkUQv{ zX5g8}NL=!ybwYDD3;ihOc(}rA+%IuA@&_WE@e1Y5teaB3fGReuoPRM)V(1BQ0%}2&*US6BPwYAVh2k&``3=F*YrA> zwd$ooCQ*P8xyTt9oP05uYlz?z#V3M1TGY4bQZ!-d_i_aB2pR7$3%NQowJFfa0c>Mi>Q`T@Za{G{<44|7{?v_afkDy*ru7BO+&f7aKL;9*t z2BmkyjneCnHga^Z6ckbAUW)Sn(A|VD7Eg{JRSmJxXz9;2`O<6)EX+)hryy3@M00+k z50-Mw*BKgY_ZWf0&py~4xY>&{CL4EnrLY)^{mgcRpxB($7U_7QC#TL<(BOXv&!N;` zYUW93l);>L?bmUHsc8qRYD0TqRdnM6=@FXRLW4)|_ix|~&qVcOSOsGW?hd=5&-J>| ze+oE}DJVzhq~(PXO9drhZItP%>2(&G+dqF@IDGmDnX!EapIzrLl{h#qKAordlQ5TD zk}4ZV;fOw1HaMSz8k(6YzQMhRP53R9|CP(u!nz9I6->nqV8 zjY@skiqW|niSK${$GFSpapHOEP+RMKl7-EBM0rV6)O#shbJ98S^Wmerywgys;{i8+m|DcI(iH8Hy&N2 
z^Cy}3@QQEw>lW+3yc`9lIx64}>R`abrC7oA7M)YRBPXmLcRf0w zVwd!@1b%KGm#5|<-#!hVemO|uCVUJuUV^^(3Rwa_y)Z_S12v<^1tPu7DlEx9NN!NN!SK0fq5vsqC5NM>GO`FbL8JbFmMu9t$qY z&>_9`I)bJkd{vvS38CAI=G0fB4Y{1t$Q5OX4M~ft6WHBIp!&Kx9#RnVk|^{d2?J7@ z5f#uMjYeufq}~+?$xDj_%9j7jYH{h3{sE2-mpbus+PndOrd-e$3BE3h#7{ zC7zmN;VvXZM%!Gzg33^;eAIzzFO)D;7`KK17EN<9w>5-rX|PT)~f@nQOeVqi1p5pZ4+v3>lx$bPfvfKvs#Dc<@^&4_a6kpjy7>_ zhCF6NwrJt780z)Mh&<5t+MbKO$?M0{Zp$QRx6r@s?oGtYSll@79PKdVAfL|PTW+2R zd3Q`Po1eAIu+BjZqan`7%`t;<8q--U+`1alA?zt_KAG<3arbeXTQ^Vy61oRKISr!3=)I4uZ9 zx^ZTrD+EV*`C;QSTT{VIh@s@*qV~$PrqXEWIX$1w22-wMyN9GQZu9sYcCNA>%V&01 zf%}gAEw1ZIe0KWx?rPz|XWNZ$t`IC{Z*v$M%Mt4qz$nP}u&)=`tc{t9i%^j7D1arX zen`za&?~&D+D#_IrtUP47YJffb28&HN{5@O!G{EZh8$MXE{L5r#L$cVpi$X4#H@u> zHPMjy=YO@ct|*(`q3F(z#I#-Scnp?WVc@BJpJul~?X5pee5u&Au8@hbTja^GDxuF( zq{g@{7j>h{lhmBn@O#|9L|V)qnc;lf1ay0-D|GxK(^WiAvx+|Qjhn0oYagA4BDFD% z+S{zfgY0ib?1P6SScG;36t`iCL~^uiE?Fa*^9ZRHhhvlM(V-1X#N+yQrMPQA_COVR ze%EBdl#!rvkQc2>LPv3uyW%DGZ-ep2oVQEi(my}GMNTB?`#dQbHv0@(B))lFyEB;j zPx61_uh?7?75F13=q2*E@khB3#N8`rHBHeT?Dyb8Hh8+mwZ`+~nw$ahn~ClK3n5-Q zm~gV#cE5!IyOYu@B`@{HwDj>5Jq0JAw(_8$H>$;`K#B_4Ao&#DZt$Wm@MzMD zmP3bP(m66k_zNdsQ=Xf3z6J-DNQsji$a1g1i*rS<9$c9PDzO_5A)8tIkYwwO{Z0vf zBAHPLu4-K`oD#pGqkkVKEg~x+6it3v3;mmJZUtM;Od>v=&Ry`S>CM3khQo1r&gV}- zQA>E_`jluB3WwyfYk8*(&TAhsW(4P8v*Cr~jK;U90iCT)B#XPq=#nH3sHu|0w4$uN z3IZ65t}*t?Pa~C2xU@2Rc$!~PDs7WO#Fhq9wBRyoaG3rS94p^9l?FXs&hr|i^xPIe zpS$C#)ru+`?g#O=J0FLCEbx#*TXp37fWMX4I*f(|aWH+ao-PyM>voLWFlGw8hNr>8 zQb)^rRGApfF6a<;0xp+zZk=&D0!sS-%zArFs9Poz{fw-f>Gt;OuzvFMb0R51Nnbs2la_ zZHI}{NSmxHato6wiFrP{MJTij0#QYZPi;}fq|+I2vaSV_mgLxbecie3J>8W2?Xa{w zg{)0_WBKOw6Ozf1#3mZ%b8_YQO`yHjsd?*8W~J*X2A~rYX9jlFbAO!4_yVdw;;By8 z5BOSC8>ysW@O%oUSwW-x$#3?2x{WzC%!>H^G{gh~Rlalg!D!8L{F> zP<3cRAvJ$Q7gae;WE?%N7jV9Rx%gr>==In5r8xry!M@}5>;@sl{iFm7#$4{oNK6jA z%7%v_e7HkCvx8Pm;=Tk786-QrVymRK7TfytQH!aL61ZPF$}&u`{}slObo$pvqq&cm z#EN-VFvO4uGR6FErAcMLSgaq^H`vcCRtQtoBH=TAjq$f$5MRT`DO_svqL|ElYC+jh 
zX^U<58IhXaWhN73zKkvhDV?6!C+pf!b8g#(yWt@6g#5)dym8-Nt^4MExG0iuczU)!^{Ow)@32%-Q`4{%dEW;-oD+v4VIN0SkX+&~rX1ut%alSu|q zy1?pX0FnDfieJcVa*#_3g!*tPoWP@nA^nEYPZ|!+;ne~cvsQ1XI@M4L}RX15ZvWEE^7uHt4{Wkb{&N3A!GJkL|pCU@dXEpBXn-sd% z`EX!R=okiV_9HSx^M`ivJhqAopYI#fya_cZI+SqvCUT~!b%6ZYEf++AFO;viMh$4~ zk7>DWo*FKg|8;2h(*7>=K7;Ub-H-zDkC=Fq#0?p(#*pF9#r4uVU;IqsnV!0H6d$q{ zf!-z>-}M~d+Kx!>W|Q|x1F3osP~|t1?}hIj|!`*)Ut1_%AXsR~R#C0m#_tCZn-_2{k6!~R`B=Rk8!_6}GF6-9svmWou1jL0;>tC9Vk*N$ z1)kd3)Kl2tRn>N&oE~GTSn{jJ%+PXhI@>}Kve>b*X;9Tgb;c z^$3GHw>n?@Mlp*taQmL1dG9n>(CKr5@&{y14F8&CT!RaY+9C?h2ukE4jEPR#lSHK* zGy5SHSjrIXLkfxDV7ZMDGkn0ES2?Ea5~7XR_RNhW4*(=rU4gBMN2vauad*dcKvfY) zC+X@5B7&(gdnydLw}4@bjSI|&-WPhc?bDh8so09AMQL8eD{>kSLTLF!q+D9Slz!{? zQmT8$b$gMtMEuRQ66=#qCXbQb*66#gG$^##)VG=HIyV+ywUrPf$u> zh8m=BL>$0k6M=~y;De7~?yq&RCiBD$iaG)s8Rb?pPusj_Pis_ys1yTPfFM@tlgpJz zFm6}V$$CIgm!!+7uwJ~u?BQySvfHEjE0 z;iNEi9ND3aOMJ)l5@jlGTKNT5TpTTtj5;LG^7V?+auuwp(MAc)&k62`22!r559Edn zHTFF06tmaS*CCg4B_zdYzYp2s%(Wiq7Sk}!WEkNrp(>o3owYE>_N_;KABsvQS$K){ z2UPP*7}Q7hI75Vrdwq>qENUvf=+r|r8~quE%CcV30>7jPOJs0PA(9n$D#%Z(Jdt4v zdBhjDF{w#-QR%ucNU(bk;Q*-HuVpdb&mg@~uB+X|MItQlA|@)Y)}RQg9|c0NKM{TI zDTWm+jzU`?5-7+74H0#ruwL~UV47%yOIujAzaE0dti&qxy{V*ZkJ3>Mr{PpwTRL(d ztF@>x)L+*Blsq(y08{!`CdXS{&*X1l`##RH)MsV-HbVpKPW1c?R22{e6SOONF#+HJ zB^G2QAzD`_NxJclr+7d){-c)pzDrAAe2Q3Pu81xYLWCdEUq-VKUJI>tt|3IjEn37( z!7ahCJ(bCRMkIvJcX-vTxYlIq+sx)AlqcHcs zJGw$07mS-fO`S2|5YL(P#~D&km>B6QzjBw{}N-X^s$%Rn)&3M`u(J}9v^ z6UKm&ajAQ)#@pr@m16F$&?msBKylkY^)(Sm-wlqT|Gz>edJ>{+2-CrFG~dL-~|x^G2;^&G@z)Oc%z;u2Z<;YOmA)=P1KkPfs z6SG!P%P*P3yMT?Pw-%%BiRU#R=YnxU#Sg*(oJ94GRvc@HJH$0rJ~H=5P}^FNzhjxN zl_G$Z%w8Duo4v#Myr*FB8ZDD9$sVcS)}~^AFMiqrP(R|e_9APR|80^BM}I)0_k%~f<$NGx)M=9$$Lko_&eJFM z))#(EzN26P_V3AJnP`H?bzRakk4GKB2H2{6{yd+T^WV@3!`TBi8*~DL7W0{60@l`6 zwta|`88S?L8h$iK$uv3&+vY~7DTtg{&&R5wxcXfC?7AQxM@nSlSdnLpkN|XEkK>s9 z45W?`G5U`@A2ARy8inq^)s7i^&GssMGMi*2(w*3Y+AS#Y9j$PTBwvxwB3pz)4W0qp zv?xo6+~S(Pu`6uefc+n|+q(xE1Ld2|V{jD9NkaT(M**BiRJ~lUUv^xM7(}oT+{D3c 
z6C$Bu$1z^&?9rRy-vd?_%OBhbBX2a0(-*D*X>hv(wvH4DAwDd2}l#S6r(X26Q{Hg-nx0|lO zV5VTSiHUanfO{5ah=8IO0M9_?OgeZ>l!DR^nJk$=zdCTXDJ(swtbBmR0DlUV$x>LA zPe~P!TDcU-G3PrRNC9u6;&u*GZtU?kDVw#2tR>cKd#&lJR8aV?m@CpQUTCF`p&o#7 zYFI4l&i{si6y2N#%)%09FM;{BSrv$kh%lS=OrG6$REmjDLaJH4>Y6@u@RiZXrgqNL zUq;CCerl6i;DF_9P-C7d79%wrB?`5nfXgda*|IB|*HO=-gsGVy|GitXf_=(}fAb#Z zrRc&9Eciau0z`6w1!AcAgp58+20mep7~o}ZR$Ax1?(mvBg$_&vzI{;8k_p8~8aO8= z)BaLV=5i|UY#AY5@Oe=T4rBD({mRKWBJc{O0NZ!C9a9);8yn9FV z<11Vgwu-(+LNoE7VF%h3Z>iZLXJk!8dxA$F_BeKE*4#;W4JtSt@LKSw;M)zdEpLn~ z1la_%M&%o@;(9wM^!Y~!?4kHqR_&2NZe}YQ7*v4AX>zP{iE7b3DV~~s35!#nWZkXB z0N4&VwFLM%uvGnl@y_`P0B#UhH< z;|LW%*Hdp3ao$w|=Na}BVv=CbQ)i>g-QX6}sasdL{QD%P7(-Yu=I|b-=NF)NP~lGh z?W&A%;4J$(g?2MgT(361vJNa5#N>$hM_RI?@NgSbjWKCmc!4lQ+najTb23^h$Vt0a z&hG*u-$`pQHrxtGa{}-R?B&0dso=pQWYeX{y9svNgpV~Xr5MjC4rS0FB)+wXz%`PK0_kh ztJvY<`&=UweRS;jvnydtcgWtUUH>T91{_s*6$PvQ1e6#s2Qwn${5rhFC^L@7Eca(^ zmB^Vn((?$dB^*0ZjhJ76nKmsrD>R8VC%kqbdR@p(aHKJcRwQn`Xwil)VV21LcVBv< z7IkXr$wL$QYaVUHCi!z)xG7ezdMe&J4?>`q&*=_PiGg)+KdBU$c_kGuBh{j5`aDBg zhE+e_dBHk(XlGvDxPGPTr(BZbzRhV<=olB~c3rI|Z&IjOi;@;)9f3*6Gd8|2FA;hNQ}&3r^2RX>Coa^+^I zpGl7E?x4j^hW~|%fI)D)ALColYqT?HZxA0a%R8!RxfUJ62npAw(Ls}wq=xfuuFTV zeZ>*BYQA6Ula{61quXgNBx#ozv0MA*o+VP)`gVVFz)JKL=1VEN{nF}w&8WG^g)iz$ z3;_2-h!^+#(X&>&#@!}tgsma-&t8bPR*1{qJWMvrn6P5!F%U%f6#rCpUxaVoNO~G* z#85N&A)e2JFKNCUQlirqj;&+GpGKsYG%_cg76Ga z*N#mUlJwkZ;gG*=&ZJcO)>EYK_VsK?W=Y;xO*4RPz*%(zEX%B_OYB&?>{_obA5 z2l))&(xyPy+T$YxU%a6spWfwt+wK>#OpR66yi?J(bX4n#n;Up%Lt!L4^E7$y3OXy6 zmz#Y?U9e+_g4&kNr_eFP-FD^!)+^Zr4HSjfHc;Z&^>ve7L z5n5IMr6XHchV-1V~D=R)nZ`~_G;?;qW#!-DYS}B zewW{Ewe1M>{2i7HgIr>RRRFpyE|J1?b{6r)6@ArZcz@@QHoJBQkdZ+$8F>15P+vFS z96`61r6-Zn{VidFcqo#>S)r{AroQ@A3(I|#d~?nN{_yq(RyYFenT#NsL9dZ!Zf%MZ zOXij}%8}JlAcR>R?xNFNvWRrwm30E>XB0J*^gI{KiR9vM@mH(@tZnrn1=cg*E=7N& zgm~0?5+UM^2NHQo#0?+B{_Y4g7|Vank=xV8VwpYllC>3s!}@!S zY<-A62=WUN&4j41V`Hd_eGoZFD@hm_q$kQ~G(8;fwZHJ;Jla(Sj*Oq>b;+Bn-0_bX z5AH!BJt0S;-7%g{9RiD6#%W;vSB}oHEQr=@Jkz|N6a-Q-tWwdXNy|?%7#&csLY&wp 
zj-)K(PlUapjA@1g{ngd|Wp0j!9!zG8?&J5#7KrsFbB(y zQb%RVk;@BT7)(4idP`gynval-&-a$issfJmwi5j5;-ob3%!C8z#5hx z-D5%s3YS^70O~>ZJ`czq41u{HCSbVAOV2wVy+WgEVzw^juZMqIzaTKFPh3 z+>HW>Q_#6$vkFFOQM2&iA&&g)%z4D5*{OowlvvXv5{;k0=Fy?f$uHE;!(AK)9Np11 zy;r-pWYtX#b4Gy@nw92X&84v9q?^u2Z{$Rfx4wj35i}(4!*HJ=KVu3S)SWOItuUQ5 z=TcANSL~C6%jAp72aahAMXP03x}gzc*s?O|4iQ7g34=S-`E4Em>u za-rj6kT&!Rs*>g99Z(8}@Qi_7CX$YY5)EU{eUh-4I_cttoc1cmkSW$%Ulv!IjVrL@ zhl&(a+NSjYaR~UK-#%evwnG=ga{mr{#H51N2@3)mUNAV|O8^A)14hMjKu5X<0X;ha(@?PH{m>>iag@#v+D9~jHJwHPiD3UT5~ z=b7Q8vBkx)GFTA_Af} zz9K;EaITP1Bk3lcSi*`F2Blgm7#toH<{~wO`y@*GFoidBR}~c9mUiMD07^2cCxrV< zGfNQY(~Mc-aR;00h{Ew&`FbR$Nsrzt7X!wDnnc|iBQ=wDD>93j zb3;_wa(!e`52vbL!+B2}8hw-I$u`X=iWB2OFpt{wi*_a`&o-XSlod~F#9H2`BQS?O zj{BYD^}Hs8ubD9o6z3(nn~x6dkf!K55`qrJYLPIqLeMZSL#1U(IsS7qj-7@RaSC2> z%2w()&_f6;ld}AkD~Kn}2uAAd#okcNNVKz-9QMfd0931mI0|}mn${+C^H&%ikg~qR z&^%bawx{AXIdab>%|GzXesUY-<1g+Un4MIxmE~3*#RPWq63}qSAK#%-Gl#HvS8)w8&}fTdBx0sg}CZb)+PX$xPESjsAr+;fuC3&|4OU z40GwcMX*fPxe*YS-&r^^I+q#M?pIBjX@g=evw6~<0{rXQq+sw3)6U`Y_wJwN2|lRB zL1U-PT=ba7?9fh@n?EX2jEIWX=aF%o>fPLAvjT-&>n1=yZK4Lw9aykdML#G-`rJw} zKmB1r(8qL9T7beNQ)tsY4HuxD5CY5xnpDKX{N889vTj!pEV8Z(3&}{p1O+=*4}%bp zz@Hn8t%K>I-_kNkj;>pl#~qOj4=XVU$2eWEa}@DZLhxClOf2G$c-Y@yCQTES4~uffx}8vW5f~la%=7K#t!>Qc9jq!jFw(zruA@ zQ_%|77SF^sa=>Rsqn?VUCqUzbLXLfX^Q#@h4>&2fabyD&xHjmUM=NpVKrTLIA#}Mo zSgH!~Rm%SW=@Va#6NK_Dk5*5Oo=YtIxB_>d}tAB6gh`b)b=mM@#SgrK9tD?Zy%P<0tGM^SD)L6ph zbu?!Ss)@xkitVjk!`J$wVtpVS+t~crHGD~~mbnfGCjyhvJ$VU^PT!7XFhAT&LU6+m zOub2UvIQjxSCu8WjT=Y<|Ca4{Ct*j_XYih^3FAhHFLwSbcUHG?r$kD^RayIpL*3`V z!u9EaHfgU56IRkZtvq*87co!UmKA^Xb&f~| z)xD5JOL)%asoJ@gG)6T6w)JDmOL`Nfbp(-J$FCKu%#qtpb8#CYS46$oM8m z4u?S(Y0g%=LYMb&p7xk7jllJ=r>uYM_OZA!X5#$s`Nk{L+n-*imGR$PI2^Uvk*V=* zFLkr&@K3AD>pkVaaIlqtB_yl=)(Cj`gzph{8=>+=i@%<G=nqS=*drN-^pZLW z`XgNym^x(((!z+Fq8v&0w5v`oK6-XGHgsIJ=KYpE1} zT1m*QOUv}E(P7Z$ceU6LZQ{YC+tHoZ*%MIy(%LyCZ@cFGVL6@{6tu8Yv+x>`#T?)x z2?^0jZ-u~iN5gmoS4P#1lZ%T}S>VRb*5T#~NwR?Wd&`h3HPo6qg?vuC>ddK`JG|i3 
z#$UC3R;RJ$I+LazPCvI-sNG1c@-k)@>mu+ccaVo|o3rWY;wcr;H-CjUE`jhX^_O?~ zlb5L-Z`Fh6B9PbTtS%PrNHsc^vqw-7qDMce54Qwt83K_(z2slTV-U16*@M$gARjsA z>fBsoP?lf3IUp^Vw)KK|b^O)k)C`Dridlb1eTtYl+`w<|e2!;=DSLBJ7BwV|TElK{ zPwA?`ls6JV6hpwvCr>vY?=^#X1N#I}0(__`rW2c$&ekZOAQm`|72(&R@QSC~8*~{Y zThWx86{F4OfRZe%KAy*U*U{%hy|A*uU|C_#w7`?g{F9Sog=6!A=5~d!4-?VCPaDMX zG^tc{Z<6R(|I$HBKrh0h+Lp`TRc&0Davjo+gPD25Owp`xDid;l+a$6-jZ`PnlzbA# z34<1EmLS#CCR4Hakzu?-S4vjzj9{$`Cv*g+n2MuvDfq6Eq5B_*mP9qwAj*;7ms2fW zJedSU%qg_s=KK0;Q4ym4c7LtQRA?gO|B{Lk*_JFll-Tu8hXo6IT<53g3s+ZHk+oQh zzP5s8z>HZF#hRv;XeZ?cu`k5L5YeDr|6YBWIlZA>YZs?not=-Xr4a>Z{A|Qp@r!uh z`-z&foRn=;V+r{F-PCH6q31aJE~S(G$4a_1%aJHV8MrESyB066)<=hjN5?4+J9YPg zclnfr%f3FgQ^jDwL-eYe5_$Vzde0U|RbxkHE`0C;%3WO$@A$74i^-F=KI~$oaIuTq!jdscAzUC zYqZNi&V#NYWs}4l>W=9%43r3F0LrpX;djM&v(T}^!IFnNY~2D%1N?lThS;7DQ}B7_ z%i?y^L9JeUbRlFrCI6IXe9|`;+=C&_`DH8HuS0CbdMg9&9#I|E*@zf>0 zqA*0DeCFsID@{+|du#=+)-hnE{bq=T1Y(>6KkJ($Yo79*=i^CHZ{mH%kPBEXsi^X| zg)1y+E5^W2BLE*%1UCs{RJpeeJYg|oMVmdl%o-CsXC3IFd)XeP`0rZE@&0LmW;D}> zH2nG#wyhElV?lS1t|5Q!zF}Sl&`9j*MbcFuvbH|T=2)5RN2Wr$%0;}nf|vBh=GM7F zXq`_D=oDN=`fjZ;9o>)eOpJtWGmb?fg6rXv3@PPcut;1=1U5anzUcdCb=zrt@0k~t z1&NAJ-egIgpXRQ*Q{m0Zk?&t>glW@_)~nZvHowD@D6vEC^u3-_SYGF?#QcI{NJd-_ zxEmaf3sa~W9k6Wl8eYGX%Bx25DuN??NENzd+DgW}G!$!|aD_rGdwqQUU>`99sgRYc>mGchYAA zm?FAzywyf%X{12_v7q1k8-tFe=%jmSOm?o%GytT~0b&lQC-SPc^x_LM`8jO4K}mwS zWr=BC(JKN9%$Zqa2AC&tp-fUwIPwsQs~?{G8avlGQ8jw^F4JT;7F9y!#y0$mr?G%Y zr>Uh9vp<&JHg9uo)?WzVpYaWXd?hUUKOiAk(%~#~(S*7c(Zt*GP{B5g;KLxedRl+x z`RwBm>*t%0ztKiJxlE);Id!MZ#cH%AY|#2Fs-_B+5>%KFai63c1EW4%G7x0De3bDq z>(dLh4L~lB*pS>bpU>J&M;>S2C_VmFWQY#QzjE!N2vKTMwbUrx02Mw&7QUXv!mzOAV z*e$(tRxlk2;y`#va9L{-${jF{5?!d2 z9f$N=xU##7WWqXdcWg9xk~oxxdj*w?1sTwE`nlUr0!~%&r@|I3G@UyQnW3g;bR8JV zac85|*!XXBW0wlsrm`BO%^3{ebt<+Uf}jOROnw>M@w{mvX5cJcWS1G=Vq{I_R=w~# z(bT!9BFB`=Rl9-~uRtyXPZIj8M@4uC#jGV5gr7qmF-M7-<(_`cYqo3LsPZ?C!AN6i$bav!Wl zvleUT`GE-DGw?lKawE&pnFS`4MxCyW4t#&2PU4^HV+JKrQ4_w5GtgRS3t$1rW3)hT zUA0Ioh_h)VhECASf5*_eo5)wE@|;2t(Njr{vNk=H)(_#17 
zTf3EfnN1X4GF2oW^)iK8KOdZy-(zePp!fOLxa6oAlP<;EPP|dHW;&=Qjf4alOO0^C z$7KXT!YTY|+ZwG))sKUqbUUx?6n{LbCt|fWg%xgXAZ4Z~l*sUzN4rA~WZF-u;p-32Y8ET@Vr zV`UmsE2NT@SIRI)I}HcnyJ>&wya1YAV9Uq;U$-0Je@G!eOInC1J`M!jyq} zOh-5l_1JkFvnx5>Kfbc6)^=LSYSzsD&%XNvcH*yU5#Iy7G--=Yx2T@+>kKT{D6AeE zfA~!qw9vz)-TFdW+kr;rIIXn&`-(gA1m5XK;8hYykOVZSog&`k6w&}Qt=Sye&rZ^* zzb_AyH@SZU?mJfVZP^DmQa{06Sjt4toiTtF;C=DFJhz(mylct#rVz)Tt!K)F6DLH+ z>(vSG8dwz^(qW=gWWx}g(-2AZ$tlSPHUAH0K$*YTk-MT`9M?DK%(!A+f@eiLR+h8E z^v4N)mPr$>h^;Rh2hAIk-7KR@KK0k5xJELl3R{k;5)$N#=6n5L_>-ihvM(o!A50RI z$|d8WIELdLkoR|Fu*=R0!82nbU|jm$kzfy*@<3`7r$|Oss<)%5GAd7$+GCP4H?<~O zY**xkN4-|_zwIom{#%loq>R@Cz(iT0;tafi7t4M0_#FX8Z+DRF)gt4H;qX1E+g4y} zO}*;Zm~A9)6mkYmaBw71K=88P$2^!5k1y~v$?lXmukzVASXAZJdE@0-joZ!uEX|Z> z^>vmW2Wd%GIU61jV?RfAOsFvHms3r;m@Vax zy%}x(X5uzvO+^y%)y$Zr<`~k@_fx=pVgWRvSaS4C62=T9YnSJt*Lmu*_1H00pVMnK zVndngcP0Rr3|q|FycySwMPUrHYw_hrwIo&J=#T3Wyw`)#ROGNcF<`dH5p;7CAsGvc z<;vLG5DppV6jkJk5>3se0)&{F1@hg5RH~%(pcj+-T@X0--*H0 zm)H2B;)1Z*!#Vk4zb`Dixz2dR%gbVGeS4K8wbpgih%ctb+Yn=)BiVy(*}~M@sZeSv z+~;W6zX-HrQo@qrR_gsJ>p^uNbSH_sv^c2P*m&raH9Slkjr6sGuq+PwU6cT=O}oW@ zJS9E{?~M!Jr}&esx2Ptax+m7+-8+#bDU+6olPo4NAib2ZfF#S2I+dK)Hufj@{2wT% z@mv;7OOMT#76SFLWGoz6=S3QXQHV4{Rbo@mr$_IsdKaNGh8mOFblwt%8%RsU$LMQ>faOg_coT{>6O@Q>6wVlmX(5F zb|fK4GgH(^R?~~=*g!O-*ZK^#oK2@8eX=(XEbFG z?^*YlN2@d5ayFI5Q{c>}@c2$6$0L={LGV&>A)TuZxw4X{DfSYH^GBs+tC!v4<1?Y>C`!*#N4PY4K^0YQikUmG@^L71I@fhLwU8W@k@mIM; zJDTlV4pQL6tK-GQ^an(Xhhj*5H_e*O)R~VGs zT7@ly$7P16F9A{9*eI6#TIcr5`G`_P&(_AuY- z0hc8t-GUY47Y;~_gjk+;-GFF3_(wR0Fhd4mt|0Ja zM(qLDakgFHQf1!wP2o-@i@bb$#GRR&Ec-ggf?BVLnsK{DSdxLv&^+RIn#)6zZ`pfE zjLt*;j>Cw%Md=Os-zVkrzn=~1eTiC9@f5N?w1iZ&dn<#?Z0IeYz&dwP1J!z){VbNai?C zx&Dy-Y@jMQQ_(iN{@A0&{6|1AJ2cA~8WT|bU<{%u$y{D^k+*@W!g;4LsIhtY5 z&yZ(uoaQa)7c6l02Qp?C#Mga;PFAZ)r+X{1))VXWGCOI#ZMOkWg zMDc+97(Q&BWr8C+CbY}$BAK-IsvZ>Oq6{c`Fd3Tbv*d-SIJr%$HSE>m*LF@n>X5p~tdS<^;Q-yqt!i!w!MZ4QBt`J$?%>G5Rd)$^036bhQGvMtJtC1$ZCD|@yP+FhBuuCW)3DC< zZ(L0EP38LK3!94qjSH8nx|xVkB_>{f-!yYRPuO{_eoh^P$Bg-idiRo&107CEfmT7( 
z@}A>pLpys%-PV!eQZ?mSbb4G;!Q!b~YwNeK*sx*6`JfNyId`^1Iy*OYfoF%J8P0YN zCpZrdu3r3uMQfhhzrV0NnT)|d8|n5F;$+k%Tx+T={=_=n4C^%d^(`<3AMG%9jHcw z4w5^jnxQcoLb_T+>x+>kMlfGmdR;AdAuwh^+(H>dRa0{{k55ynn%cNDg|M}icm~>? zbt(E;4DK8)yB4^mVS(pq394p-6KQ6XS*JIh%$_kP#9Ka?na&8cH!!X@)|V+`GyhJ( zU_*kHb;sGa4c1=;4IGp-#0oO4yn6OMgbA%b8LB_flDrl&?`~zFBB8|Ks=Qj4Vap7j zX9^{6kLK0JEQa^%ppHHxLrz;fSp95vpidOE95)U-ZVO~whwUH#i_N$Wku%E=4Dc2d z&c6BVwh#MTYcl%Q>@9*Qtq=sbF^^aao2vZk&RlWfn!RPLyOQTA_e^y!!MD_{Sh|%CgZk2j zIyt#whkowd*bcc$?QrYVEvkrbYN%1&RGrr6kH>xf26OcJOGQIC2@k>Xn*g4(*v`In1R9(aalc^}s7Ls_zXehq1Dv$1fljfS*h}x3+YLOBhCX?P$<0sH^XVF{J_c4Cn z?7Eq`%k$h3YCm`^-|NSVh0M?>9{f17 z-(_z~|M<*dkIC)i3EXq?DE3<*xsu|y$h!$8m}6i$8)E`C)%X2k)~1v_DB}MbfpN~v z-JLSp8!QO*#LC1760Jo;TKq^1KLm#nafu{df-kWyW=zt>|AjE3h1Du!asWyWtmm*K z8;}8r_cG_kz5wfp{jn+niTMSocnHMtkl2$9A>?s^xfp3J7q68_dTn?JEq~97A)1ti zW~1+CGXk-n<6E$BrZT|7<3LJ?!q0q|GkZ@1DnbZ%*#{r64smf_(RZ>D zOti+r-?h4QS*y~GgQp*-6ryt#9m;^FBPn&~*5xXX5yRE>7`CSD-m~1N%9X{1_FOvF z_GNLQjgP;#V))Af<2d+xO?G-6k_zM~0@At-1Noy2SbWAw(G-Hx0HL&-Ae5`&J1H;9 zflUB$$q;LyEUS|fb%W;X>(bIgT87g5Lwot=P4R^A(v5X; z=2|QkxS7K^re`oqOb=?@q+a2b>4RkfQ)*{tyCW2iSn!>O_Kxh#=aO?8)Bs#?y+G>k z)r(@K*SMQEnhfF|lLx+^FpxsCEb7Yifrjf-dsL%1p*h)XV&ngzcQx3B@};3N zSnOe^MQc34g--fEwX{Yl)g%{9lX&re*Wfx@n;7kFZPvZbTBNvN+d!9qYTNh=&B*Qt zMVYG=C+|?!a>cQlF(^m9GiMB0KE-2PnaXSZGFOq5q+InE$1jee*KnxTpl;2lzDqWQ z{wcYj3obzY^UT=H+(;uERtXgSLlof})KzIQ4Y<3vKc)M3tnugE#7fkC@E9qVgAF)? 
zD=cyh*rD8^20SGeqvnKCGc)=*&MbiM5>H4`H6Hc-!ac`7NQVWC*MZz#C|WQqsAo%I zmUiSxWq?NC%G#V9!=mB^#hzxZ{FMVY<*{JYKW0Tta(3nTczIlfB{eq3G7fG&je+u$ zY*kKmf0lUs45 zVjr5ak8rG8iH*$meucs@1>eR7IU0NH)3Flp7#tf>Nf(HZEygWky#V4dtr1IXuBX|x z8<4#_v7Hq5Vk0=ftlfZH-*u41+tEMmBh16=@!Y*V2e1vSJh^5g4k(y70rcX3QSV66 zMjX+@K{mJ+xk@N>RL8fR!mkEww`nG1cSR0FwiSV<0>Q1SB&u}bT3$BrsjoaqY1&z2e zdeefquQ#ujXvv)=n#XOBrqk-16aOSWfiQRL`gQYC=<5Gjb#3ryLY1PA{!gX0al2r3 znKJ#-|ElaZTsQ7@g73sC;CX6KmD6X}`2-wDl$Dp}(FP@{V^4QJEm3fs#M@f!)m$*= zTg5W5A}eUir=(?~te=)oc}wh**WtUBR$PtmY@P3km&$gs{_I`uz!kmu=A?3&F%aOa zUGAe@c9OYTWH?^+y%*zqlWOc4&D+VI63$qemuvk!BgZ>g5;$>$Ba`WdB;B(<`Vord zKPYit)yF=jC%n+!cHtZ8tj_cl_y{7!iT*`;1Zb)gJu%u7`P21aHL*{PY_R0*Ku=D* zeK?!@d~s67EwL}B5B9_;_$uR*k6eu zP(i$8pXnfDaU{|G5mF;>POR|Je~yta zT$chVW*Ma4VDvPj!>qBOq;~Zt9H|fKhv*j=XN{e66+`A&_$O?AmsUF{fwgzcM+9E% zRNil?bV0^q;#=kbx3UI@kK6+6jMZ;re6d&@uYTqK%YK$Gr?r4GyM}X>9Li*d-9o7*thxAsm=C|mN(wmL7ZB?Xj`S;f8{x=RYaZI@W%6z_78B}G<=0aT=Hv&A!2%Fb}9HQvxTH&AwseHv+f#O6A>EgjuJp1IxTX zt{O!N2|)kMPITHDGtn2&nc%^=1ftfbwR zoHUxLnV72XN3c;$G|t~}(M5l0p1N^X9G7#rAO%&gs;Fq^#RWz(j(S zY)LB_3d2JkBIasUCCrB6KIMOCbB&h7Y1UUv0tU()&0c}1IcLl8)b+|PK9;!o;|<#u z4TFPxJa=r9{%R;o+cHQ~NB$s@rI%b>YLtsDlD#+smD!A*`9st;wzW;)Hns!5p=PfrFrVnbKX+V9a`rf;arUw=t2alG8QZg zq-O1Cj$h&=8owMO-&hbjz*=2a_M&Q^Nz$BO0ZAbL{1M4Z33M_tXJq=~_brwWhn6f~ zs@xRC0>vrA;AWnR!gOAY=Zsqrg&4?E`y^r3%;ip^^?~47jp|z&WA>XP57ur1LuIN0+f zaca-Hk=>ufax8BgI5?<$p~L;$zr_9A1L>AmrgE#zxvQ!oeB`R%3)J|e7+$hiPHCo6 zCfi8WNxy;PEy8+&4#h}oB0-OwsD(fy&K@VDJY$kgVhoBPW4xs5qXO4< z$>o<{(uS!~dll!+Texsu8<-3qZSxiq?{Y%>;oZ}AA8tqMi@lzhJ7eCAx!^m;XUtVr z&z+&x*MRF?r=AHd)4hz<44jBXlY>4jlN|CQ;yjuZ!wxg^=!1;xY=oll)AuQgZ*s`& zi4&}33-cuTfrpVP5m3s%BvN9*bTEmQDB+uLFe~~Q&ixuD?$PE&US+s9QCJ8nj4J?=+t5Xf05ibRb6NjK4X1T-&%jczI7;e?P|2% z(<1j|&y*=WPl9K5kF47hJ9eD#iE`uM!2!fRR6SZ>j`>mm-G+q{>ltV^D@d(Af=&js zVpnNvdj`!57U8ww$LI%7BCZu(-Y01O#L^>Rrt$@ZK&MOPi`V9~UAHHLZ%Ol8h(xhJ~ zDX(nuRe-wcD&=2S-4>UEw}D?SIk5!(AH2Tn=+Rw&MvtLnQP?wMK4jSFp~l0)N4u6s zgC&DMNdrkzziUARdn6zmtlz!{IJIru9BHx6HCf%&Zf>p3J!#7O9X-A6&w(#jZ;5TB 
zzPRQwk@CDzA<1i=pU~bm;U8MeG59UcX|9U*8_4IzqG|jq39Xkv?rR4vwrrI6ZwLd9 z_0t?CaKWYmkdQE^I{H)^uxW{g8yehBh9&L3$;F#z)^A<0ar@?tJNDkRuy11LfWEJH zVjuBGj`R5JtTb=4W%9aZdzOuiEc^QVS1R*cyV^^xY3=H2Rb@~Yt|MlEy0Qz~sG1Go zM{xp}y=xtbZI0**csnvUyMIjQa(&f5{q~|@F!23*OUo-tPk_Bs{l!aqr#r!G&M8-v z7Wqpqd%vP2P=0f?Hpy*p9!r``RyB)Dk?Zbnj8N%ROAQ5hRv8u;XSCOxW$b)nD*XFW zv|pturfGs@3ZmDSHn|k)@p6D@LRWZYaJchbWymS%@da2OOo3m#-T0s9=CH!OXzK0T zoh*N^z`8c>y9t+Z7#@9OGFYZlYt(*;Yt(vT8q7GgQ>$~;NKLOi8!w8g>Mn1?bxzdq zlu%4Yb66&qok99R^qe5(5h)9*;#mu9zD!_4(nXYP3l^KNFY_$I^=Cw~Pbd#G;=9zw zK)KLIiqir`V#Jf7F;SEZ{LMuGMUcr8z&76TV~@ z@zdi%tKw)3;;15bg`*^*bbv2uGFH-PNxYiS3_4!YD5-{#T2$yst%MPp^32#@0it<- z1{;*nY*5wX1b&6cph93yI%+C?7RE-&cBfxTs!sOp;UxzL10rQDH#4j?JTm=ObuaMx zh+a5PV~Xyl+MoCcsRn3b?*im)R8{=S<-xGsC8N02N=3nMb@gk33q{t!G!s5aSBWUTZ}nsss=vJ zZ)hcrMpaG;w|rE)?ZAsabxLEI5W!F4SZXy#+oI!&!ww+3@ErB$mie+prC zrtViFLw{8(lJoC4PmgX;oUx=mnb>{wl4$1OJORjYTL zzpBcPd&4Oo54G9K%WZ8#;LS_b7edez!<%+(9wzipAdEZ#KW^T>c^#quIg;L^ql29W zu+uPMis7(9xy;ZxsEfrCeXiC%Lww1h`O(OzV2p}Iz95AAPrQ;Lkh6kDY3Z(mKk;92RS zwJX72(Z=rDY>w8GgwGS=^5q&Id^(Be1Tuqcl8kUj`6OId+w|+G-b@_E`fNbU)Jc<) zr70CBL2SYho=}t7;PqI2N#(=3aJWvJIa)#T(5;CTC6%dB7*yTjGYo?_GX$2*GhUNv zme}He&H?!)>cv)(!8TRHP|D&yDP$!f5gn+!d-LWE8}4po+yNI|u)4eVVRT*}?wPQv zfN?^7?aKeJx@V1!syg@VGmrCr&CEGxGOs))GeZ&*LLQSB2}#J4KoSI{yhJG|h=_ot zh~iyVTeMQODuOnx0=>4VOKn#xRZ3U2dTV=2ul2UKh1=CymZYs+z24Tn_%H1`NJc|Xk0hrw-IzhT?94ePgU$?S|<(5jw}_MV=dz3A#?>((uM2ESCl zk=Y@S#T!~$8lI_dZLQDvxuSOfpFvA&ld~CT*hmeLs!beb1A|Ae6p@~Gf09*wF`eNz zHngr96)I=XuG~*M{no5KF(v~KIiOrMXJp0b?5k#dlluQ&x7~_$cm#NE=OjPrO9*k; zAVf>ar$&>Vm?T7mM@=@Tx?v zUz5eFri+;wS3_S`B#|+{K^V~kQ-{lBnJDHk2d0mc$*WiGy_9;@g;mg}Kd}^)LqE^y z1yOUTcWGGv*bfdB)TBq_L<@Z@(UvE(MdpdD+D84B{^8Wm(WF?&*{>5^kmp1(^IR1D zfx>nx31cF+1jq)~P(_B0(6UxmF3>Od8R11u4C7~J!!t+hXY%~Vw&pw&#3NP5t zG)&x1?V)bLvaOw6*QR}DC(TPb7Bpf($7YOpjG+(n{EH@^&vS}ppc8n;4~cSI5f5P!I3!|M1N2AYi=Rpm zdN@H0D?h;3Kfv=;H!(&t(8pwZ?zt?x`N=3|DdIs)V+V73Jw#x!ZxHk24+t2?FnGf2 z_}kZsC&aNdK-L*-hP9=C zKGk$?9y^KWZpfrU9uFBf?XqEp#zUrIC1AeM2(vgFb2{fTT!l~iejj;iT`%q 
zP5%e*lpxVN%mv7olJl?C6P9v;ucyt>Bk30t-4y$W+*ymGWNURfDW9mhWy=p7!0m-JQ7Xk~mZ{9>;C4)aq)RiHZbC(L(Z z1um+nR8>Q&{F)@ap~^u;*}p|rL$dsuth}jdK}CKN&u0iMp+yO1yzqq43i3onpvzc9Q~5a z!e}?DtSZ1dO`v^fk)s~wtSEn`n({P@iIF`c!w(MsCQ%lR!~Fz(xO4ve&ilwqBpQv} zR~(HN6WJ6!g+Ae^g7aVQ3N2VJHtPzb1XGW?WtNqTSt%&8-4L3UKJLP%gnd$umTp@?CfE^%%w(rFx$$etaC{+0rs0_p6r zmQAERC#O*&T@?zOLzt~KvwC_)=kFU`(9oxHzKANAvqp!82jW%q-Jb5&j_${{42)D3 ztE|GwvEl6tTbn(2Tn6B;7RF_!$7PFt@l0@AJqn4e>j5Y42`g^q-WNrd6#;e;5R`gD za0x1cffQT1X$MsbaIXTmJ+LM#vL52ZLWp@uQ@p9c3Z6)sz(>ktCk{2L9`5NInlm^$ zHafrKSN(m7Sxsz3J@WDz$BrzdUvHe#G`lkt>S^t0uA5g9?rKaH6><85fg+F}1;N8N z*=KQ^wg}Ep#w=^=19EC=5ePu6K<<`uK-%EG5rQ;t#KZgQMi(sXuPLVoJl$;_-Ft5x z9Ihx0!RZIp12z=LSc)Bn=Y7!AL$$#>fDVT%h z6Q_`ue?3|MW6ar2MYzV4dC7m5IYHcXAYS2`tzVVuTCw{>8obyxirDL^KZ{zvJrbQ3yzZDN6nQuK|`}{U;r( zmkbO`f~G}^z1`or=V9mdGV5U7MGITtckZGndhgb|w?$Nib;(1+eL=Ix7={17zM z#lY8Vy{$NY3(dnOh14

YQD~3Nm%nr^AG>P00#c`uMmXRmS4Cy>ctoxpo1;o+AFY zEmSXwnZ-1UhYtY7E~WIxRQXwsfuz1=KWO~ zkNm+_+Nlc4k3f_Jf%08)F#R*x1OAZ_89$pXE3nmxc28GU;4NFAOq~(sF#bi@lKq`j z)x;V4A29bkWsu0zY>Zb)yvf?v$R-!VqEEi3Z^qlF>Sy~NfBWs!&)=E*++BA*_w25_ zoNIv)qUT(Hx0y zGaXuZIlcq8#XELNzz^VTME?|MXm$=I*XNe(gXQ0t4xclvK(92c@unjWJ@U$fJ26@O z&8LoDcjwkkcP?M7`ba^|+aEx;?|WnDI23>(KrDsWAYi*t6Y-s7HBgf!hMtI*F?y*((a-*I@s0BztZ|ESnc#9(erM|w zCr+Tc6Gs;Pd=z~OjrhLbwQz)Yv%{xGlmE#U3?#2H5e{UZwRf=j29XIurR zGDn`2a3Mkh$)s*7;?;gh5dFJEFZ!LtveuzP#-}nkhU`iU5<5$<93i_4-r0Oqs>u-Ijw#CG$n!7^N9drQ@;CNI4&3*|Ma$JBuDrb+c z92y5i?J51Vg{U26tZqn(o3juFl8-iXR&l62!4Zjzz6E5!WRp*mOEuFk^1KvnXz>`F zThrB|ViyQStlKTSoea0-BJdZCSO|0x+G(SY6WVE}I;r6crk@-MBAS*UvI2zc!n^se z(n7RFGLjd)n@2L^f~M~R8Dl~F;8%u6=FdIx;0zEkD(gVR@Nn$tLi)IvM<-b3X7a|l z3JW1!!C>!>=DNPHErXa3gA9TgTL^)FE|AgBIIbb_SKY9qTua@Yp_R$got$U9c3J{*QN@6dYFH;GNNBBgj+JT7?iYywMs^Sk#IZE*;yhVlp1x+%m~Tpt zd2|Smxu%(|td~T=%Z&`v$2q|(iCO(XM4D=?q{o^O*o}u^tS%rPub8yYR8%khJ$k7X zGfynpW`W9!KCJ6!KYm(*!>TfF7~_g;O432JRgV}4C0OvvxbB6UenmP2?QNV$(Rb0$ zkoK&GG!iJDVJ;?piwaLk2aFQqfDDh}=h1VDV#xB-XFt|;+6lK!SsmAPXs-(3`*$0@ z3}B_C4sWS-qM(lvpbSZ}9(Ns-BvV$<8m~UC;GPZ|5&e*)_+1RA5q%$|@iYZ8|NmI^7vcy((~=cq;Xnsv3%lHgQ6!58AvR+PpX2rnM9a zpotujupYgyv%+b#NmWf%PCd#%`&@|b^py%+>QPlOR2AKPO5`{J^@RwWg8=6|e9rVH zO%RD}15lLU6WAYsyGNDLCJ$~WbyR1?_fn543f(BnsmD&~oR~Tq)}e9eQ6Vr60*s_R zLMsePdqbqnOJPLMh2g4d>e2T^R!<$pIPl>1V63S}VXS%>^@^N+3d^C@#4mCj+AGiX zi_DaT!NFoh|L{EtcFHf|unah6I~)u(dTe?-pzsUMtBH<0wUo0?dC(ApHN2 zB1k8hzO({&kGRIklqzt)ned+Vwsn zjjkfw`4&Nml5kMVJ~uG|cwaJ&e*4e7W|R^Aw#^FZmlQ_j&)!V*+X(paQR*2qW2&4Q zw}lKNM9Ms(8Cl~7@tt~{K0<$B`_`o`&pD1~mif8Kvb56&WyblxOd#QB$3BBc@pO^L zbsb2+Zk7{XaB3#~fh2wWOT&kbfb6bd*X{S>H_^$)IDNHDSe6<`CaIH?xJ}_?Vi6ZEt+K9dW$)5eT@4G2=IgtbFY8+=pdwFPkI$@*DnXBr zQ;=C5(-WE`(<gPSNvo9Xuoy^q}m8C)J(Nm8zL)~wis-@B^mlC{?nzu08^*&o}NjUCons-?b3 zJJD(2SES}tebNAQCJmxTVBGO2egSr9;+xpdzAp1k8RN!>$ezaT*sZ~Mu^{t7ueY(I z;+EhH@&QoP9fuDk@FAecK-p+uPD z171&!!@BaNeY<;UgYL&rW-`!~+Bu#w6w#EuN)ng)%xMxOenD2pM3Yl4WgJZ#F@fxB 
zc^kid+~gF?SZ9(Lv&WD^4B0oc+(vS-nJj{vSZ;&85O9st9(01u_h4MQD}r!G zi5OY2v816p3Rl(%UP=88!KqqQQ%~wTx_o;N>Kn13Y2skb`>7^;w7^^{zg{yi8aI#Z;frYsq{TEiIJ=5Qc61G?7X%aTE06R@C*szs;kC)42p=q8s-)}yX z<$+uyndU#^)1!r2AHMg8_91hew}AdoIR?vS(Tw3cU0HY&BDz;V0WrT)JgKsn~!`C@nLO zn&zKu6AC$d3#`-S#z${dZQD?k;zAn=K0Gr5N*htI4do1405H@Qo(-?44Fw>Y&WMty z+aNxGXa(T+Vr(cZJCzOP%Q-d_PM&N-IfHtkpDSlW*>fHn${Dm2@O%YqD5uV8LxGoC z7}vRMC_yQ0L&+Fg(H>Tu#)fhRtpd2Opq&*(&u>Jr?5y_My%Khm!_(SPu&+Q1T10g3 z7w0Q5zmP|ims>mnby25@e(8KX!Y^4oqEP2!$Qd4iy3jm;t@b>6z6_Ue=2}`C+p(q9 z*3z}i5~NaFNk?18)+sb?m{)iKx_m_&fh7QVT43e#@m{>zuzV`*UjSGI$EjHR7ftYP zRa5=+ruCRtga?vsYXhvsDjNM6duuSF!A&df7I3o?@O|P+OLRLI=#TM|2x33cx8(X`M5K&AMhjX6a9>G(jI~qx zV|X5z?2los-%Iz$&;)(9#V64F1^yUvY~)i5Tbn#so}oXaQjWcrKV@!}_HXA_VOR3} z#Z}8!VdBs9N2r3-jj(c1DcLNFj!$k#)@$xe;T4kWg+7BuNM7#?%t(|FJ*52cbuP%{wtzMd`X^naits?Q)Meogg7g+nicV3<$L@PJ!aQ> zIbXgHvXGDVl{5=+2CB`#g7?@Hvv#CbYD zpnm6gmEv$q9{~P<6#8);quWTB^)q9HVyqXSx-Lc~$^J->mX<2zS z5!Fc#P(O4aO0+$S(=Nn!u)eU@QB0G}m-L?ujby&0F`puJW4u<5#mRavGwq{aidF}h zld`Cs{ktN{Cz(JSFSE3ZW)=Psw3%qWqOzs=lB0B=elpZP!|9yS5lXECxNtuHA3{I% zc4jSHTDCslh)fK#S-WfwiPxa&W5{=GLGL}d`444LT72!Ule<=2Et5~8d{5sHy8ZDZ zM;>4GSdFEmQbLM*#{RV%_wC!b`b(8=_$ZPuYW4z}{tpeg>DvGRc-pO#%Wl&^6o!At z?mg;d(*>YtBsQ%?9VaE~B}k}=6sc7!g7GfBpYsbg2R8&O_>hhfDN@C+Irr{OLI_Fek zIdhz4T2;;#N_cCyfJJ;XY~wM$8FrZOf#E5`Oh8OV2eq*>u{%m*|MW<$X1nuu|UYa|HJ$AH=e*3iW|-&J_1@F~!thd_(I z5j}iJTJjxTS`Rsza7lXzImEaar^X&>v>+i)i`d}$ALC}2KrZF&vi{w z`iyr>3Tkt|&^&1!&@*GrbN|uJ4i{T`b;$S^JTulenoY619scwS2xqf&G zTJ!dh{AVnLq)q805~d;O<=@$IIE#W)m1ZIjC4y8)ktMRHhBCPjvHyEV;jhQxS8rgS zMV9(%+EkWnjA_CCNJgTqvLxo@h_;@-$VlfB>C6-}%)4$}DZxB*N;N4RHqXsBVyNpV z?x`daGU^Exsy=HdQW>R!T_5{!MI9oDq8<&1A&vwZF$$xx48~wtEQjT>0#?LISQ)EeRjh{9 zu?E(}SgeJ$u@2V7dRQMDU_)$#jj;(f#b($Xp?U;xTOhO8iF$Gi6iD{UQF6@9A=*EuN2|bvJS?I;i=)*3UjXBsAyJ2_ifw|Zd zHZH~?I28BbG<=4`a2$@txwsOS;3&L-gK;u`z>hc^g&cjvs89(6{T!d@z z1Rle+n1?h@Kn732!Q*%m&*5o2gJuIK5|%q zLFD1$0u-Q&eKlZ}`I1pdq8{A2QHLPVFBaGrN z{LOkcFvd8R5=^j>qezbCG91HYxg3}03S5yZab>Q;Rq-o+<7!-;Yj91Du_DJ 
z$Mv}ZH{?d#n492tyvj|v88_!RZowut)1<{DTR5JroWM4GfDgGPx8l~^hTC#GZqIg3 zWCtfP#mSt)skk0Du#?j`on734GuX`?xf6Tv2mZuI_?R;}i@n^LecXk!IfuJ)H}1|o zIG20U<~*jEp~HUM#ViMy<9vLAPq_e>G0#D|EO3Z>(PNP%mbnnG;eEV`x9~RJ!|QmL z!#JCZxR^`0H}~Pb+>iV703OJLcrXv)p*)O-^9UZvqj)rr;jui9$MXc9$dh<7PvNON zji>Vrp2@R#HqYU?Jdfw|0$#|Acrh>GrM!%n^9o+ct9Uj4!)tggujBQ+fj9Cd-ppHg zD{tfNyn}c0F5b<1crWkc{d@p7;wIdR+wd@M#w~aR2jM{+j;rw~?!|q$n-B6KKFmk> zC?DhFe1cE%DL&0-_$;5}^L&B-<%@iYFY^_?ieqp-U*qe1gKzRJzRh>|F5lz({D2?w zBYw70>OK}p;!R0s}r{GlFf&1|QKj#yq_iEh%xIb9 z_qR*YlmHz8rZmt|5=&JZITagCNeoA8B&9n#O5#&Op|YV+Q>wkP6`PonE({LZQe;C# zd}^rAK-Jeyoo9Qs1O6e>S<2-zP9&!!+9@l^$x1r)N^*MZIwf;XtI;{l$jy&W3o+(b zePVhz)q;N&S{;Q$5}17y?4Ta&}#Bi_1sv z5*FckSEtV$1Gk&kZ{GmGYMzyr2*jaeZ9IGzJ2j`)zphQQPP%?#INPxXLQWCl_IcUWcXV+8B#fp7mY#}$OCD=39In&XD55+>$CqC$iNSgVX-?nzmDk9T24N_J& znE4yFKu5V|u0Vd%U;Am)*;y^zJW=rrWffFeSdQpv&y%%9n3C&~>ryn)o{>rzicv~S zc8J?pKsrD5r?P9s-Gl?4Fz5dbWDooU5VkRZRDSYL>DNrV`U7pD{|Cs&3Q&9czDpDK z{}|hIgeT*G*3SZD`;&Q|>)*-YK!vHB9J9I=R>3{;ru)7jIShHidKE@AKBKxFsuv{7 zdc6Z2ehR7XOnuHhf|w8J6D@k03?f=h>JGj(_V4#o9q*kXpS)Urw(V}f2F{&cIT`x2 z0=*{$`X6s!()j4bO4ShPQ-Z%xNI6!WV+Y5vXH!?9aAx0!?4EhgF2R=TfO~PMXo{zs zMKO$69rJBam=u~uRZ8;n&f^e!}5UA0-oDX|Q5X{q-R|gaa z1a;dEuRg0?7*w}V@y33_^{Yq||F`Zm@l=m7@>?lf*=+{3vx|Hd~eE)v=W{CU> z5aanHy>;A(QK10p3NvKX98KP}_=(Yo-;23l>q2HK#{Uvjo~cQ@c2LPWGzytL*Q2RGLO9EsJ1|HJvOo`-V}QV8-VYN;>kCl2U0 zZkry71}>nCBv-i>>)F8mqO)GolpX)4xDZuMdQ z>;{u{n+iuxhvFo)(Mr>91MpE2@Faa~gkK(s3M)r$wO=!Y0~SPoxq;~9Q{6;dwPm;X z$8oD{A`am3?e3ej1JOT*MaYnV_r!kIBD_9UX&2aiS@UF}DHC+fn*E-+Uz2(m z|JsZqeq3L0**oiW9WSRuJPhXHgS{5`0%baugd1;Y4)}tN8297VU<-r6>JJA+C8cD& zhZz{yT4albP=fRFQ{NRx!=Tbt#8A%IX=x}aGN>5%ex-IRjAC0U-4gA|?oeJSo5dQI zCH7Ti9fu{64;oWZ8{W>y5X#JiFDQE_l{_ z%V#qjQKWx=NPz!6{CSy4VlRe;%wJuWSBmvmz_XWh;Nzs}4}=Zp(I+$Wwa>LXiSwoe zP<%sjUg{;E)%~jD9q$A}Gti_fhZ3~5<_M))nw7;{5kcE>EdvL~^_8b;IoEysUoF;; z#%i+5cn|+TJGA>gaIT=&3J`pcWt~Fv;F2-MDQ|5DqZateZ61v6y|(F(;HM$g`dE1f z4Ex%M4iy>%9~l`54?jQ~?Z97r{5#HSK^4v$IiZ|(i6%|IbXuwhp*TyI(N1AG3l%}YRz4{ 
zFP2<+xhqUNy^cO7N-AsvGr2$%LzZWLCk9C`AwU5F1ce2*^cEm{~=@ z2zezC^$EqmJ=%aOcPn-&k&-dB?JEfkgK;v7Jd{tRGE|*Ab6#e!7c%Nf0^G6h%d2f6 z3?-m#zoL}c+rp!iKHuR2fk}dY}*k67HdD$Gq>dmG04Vv>}bO5n$SU#L7&$ z%2@h}EW3*MlCJ}lj04m;155)_p^;ds`Z&o<-J+DR~BTbsCohqpc%p zSEyc(E@++!9wJw4zIfr~ZPpM4Hn!c?@ZfuV$>}fCj5%ohR@@_Wc``!EzFGiJ-Xj32 z4|rgIAcc`Gz9U^@c;gzl%cILaP+fz$rn4TKu=35dZ@Ddgn({0(Qc%T2T#~J#p!9W3 zZ++{$K4QW18*Ni0UC4rGDSkPF=A!$A$6x9})c9Ns0qkoHy)Ro#QHB_=(&y!;@OIKemoY*+ zjem)wCL?PTVty+{#tbXqQ!3B5G^SIJIm{{^MNzbK&r?SiOkyd)EQF6$LR(qZ)D(K_ z#{1wVFf3j&Owi)4mCjl;7G0Q5ba}Gp7g?Z7LCeK5N>&?0r%){r9}-zWQjwyGKgR{J z#q(Dg*|ML0Rhp>JEW^jDM*=_NxW=$rF<=Im9Dpi)C6XG+!Yk}1=+YOJR1uT*> zWpUV~aH1`AnLgmp%f~U;khV#ZTlP#Belbl)@>yUcjN-E?A4NVlD*P)TK1q{SrR+hTV5>u3 zDf5aTcOMlv5KrAge$w4_Ks@GU3>_DTIuxkz)25;!Vu$cA791O?l;bZ{l0(V`C?jCe zT|wU~1@RjJ@HEyTvYer)vt6B$DzcDzbRlZBmDu`@Ks7EkjJjYsQTWcTSrY>VtWT*&20A3V(1_zTZ@GmsE`rhuc7E6!)>P|Iai#w`ne)+GsP_aTr&tP?!<@QLKA{BTCnG2f)4x9 z-k0F*r1|&FE!Y_KdNdX+%ce02%MO_i{kUnf4>=**K_cW!hMvfOpmR5iy&WOJ@ARci z@XtZUae1UxvBG`hHy@-D3f$YGAhSxzk09c;!U*-u!oy>IqDjBzz3yLYVrGmL*+HFL zv8G%lqoJ<63)oXhiCsEJ>U@7Qqdw+mt?}hIab|w!teFem;I(A+N2f%8@Qm@7h3;+R zjA-)s;4IJ{y4xD1Bgo(sdx=CPlo1CTsgKxCj|AubOg&~zwJs|!f(4HYG z(c5N6HjXlS_;6wq>cqVg^kpl~xMdgWwvIRli6OHdqaXvdi>0P%L}#1Ka4qb34rb!D z*7PK zOGizpx+!_wTM#-sN!&e1V#x_Q0r2kkqQ|F~GmGyP5EzA=w&*S_GV68=kKzoQ%p3)k zU>$4aaA02WnT0OmrY;k4D9#JT(X57nat6#y@#4(B&&)nm&7_q?(t~VE4nBfVx%)y3nAD^k@`0|8p?^aSurk@i zidKAcy4D2c72|#M%$pn606NW?f-L9gJK-ph;X4-mdsL%&;tK{W0@9ceXD4a5;5f${7mNu-@~a({Nu~>abTl$+;+Q zMeqGm)6}(R^y9n;OImVZ8>h)df4MEpaDIGCP5bTnmeo^W6W$C1D9Pz`*`(;S{B9<% zb>8NIqw-2yVTfR-9Xw$Cv(xC>GjRj9J*~hw{qp7<(%;^OkJJ$|VRx~h&1(?l2^9%Z zFe0RPH1kz1F#CG)r?b*WcwYbJ?#{?&P-^VTU+p!_QhyaYDO+R1+hGa28RB;8KNM;& zMyvZAi8X$?XEH1CidJWNkgKC&zLqDPXJvPml}?*jgOp43j17qvxOL%>^P*93oukN; zR{d$1tI(6_q3{lP*Ifi$<)+jVL0dU7zfSC1V1cXE2RQY+-Ke0=84rYZ+$pxal-*{? 
zR9eyR1r6;SsJd}K@C&B;e3(g{MOUSdGJcVP$n(XtJlo)PpQ)`oWH>a%gk{Kjqq|Xk zc#4!6^fA#&_>z0j9~4WpFm5C!rSasjC|bL=`P zi!^(u8ilD*32FbR+|!}F16!Lo>ebaN&?<28gGo^^ijlEJm1d&ncC-06A&C9iX~E1N#4I&smjTB>ds51tR%2dl&mr3I#U zpUl#+|JFOU9(tOQ=lbz?{#-m`W`>dln%|B60bC}TfDNl)4Vjxj4|!V*# z6x=XrTxn9ljI4?cGA$=QXKyTZP%CEt;`f3Xi-~8tpnCzG!me5NZx`i78Mc`>94cPS zV7q32?BsSx7jANA@vYcyl$=k9-@=j~@XzK47eOu1_agnz1f5W+#L#jHkfk!ga};7H zNdW_e(0)F3u3`%ilH#NIh?A za!J`|s4<&4OtpNZVBM%W>#%Vakk)ipL{1Uq-4MW&V-(U1{cByYcM1K18JVDDkm{Fd zZXWL;ufB5Crw@I<{E^!+Xghw$Nk|)16MHyGIK5UF98Z%%%8E2U15p%nqS<`wLCfO~ zksV~1990_;tnL#;Jo7%>!<5*ssK;p!Sa53yfqwg8j#Yo*o6bv3Lm2jBICbH?`!$pcaPUX z(Ap23{cA9*DbjJ0$R@5GJCVTzBoDbb?r9`-_`@XtIq_(mUsMRT%Lt<4J_vxQw`J|0 zy4V5DL{pRwCQVk|hx7~lT3cQeaYFlv3HSvqn*e$0=RGq`$|9K%T2p=la z`aj3{uy?(YxCb0NNpMDeti+DruB*Rj9Ij$<1Vjf+lLBT0M-7^pLu=^di+79tit)?d z7-PltTPmchp>|ZlK6>~M=VxCSa=~dcRZj6@d}WI#8q>lpN2Zl6k!EF2gfkMe*2FeN zHg{O+J<^ZaT>Q4(HCEm9DBg4;Oqkx9koEjKnlRm9$V#>0GlrcsSXyIjVV%2^^oaIDMFcd#!Re~AnGd?5`2JhYVTKPV)u- zjOHd1prKK99u3hQ*PV3$!--_Fvhu^7E)EWvB1l5O=G%(&Ppi6+3nByp9{jvSaPIJ*2c@#EowE&z$Nu?Ws4UjhLJA2p=64{g%l+;Z8yah_ z;|}z@4F&yUA%A1gk3UmW0sNxd&t6{fHy=|7a-}Km(Msz0rDv~#9g)n_q@9eSga(pB zzeh{2(=?iOAUhY;h!#TM^Pd%j8Z$UQ6HNnI5z zA{A7>o9>R4(|G0hGPSQ}%Px(H|6)MD9d|ehgXxkbd4?_^5wqA@b3q%$e@5^tSPJhl zbV^@8z-3n?PRW<3Z^cqdQh#N=SVQEg^hN#G{ zp9CC|2Eg{NleaIv@Da79nT#%YXjTO1L3|=Vfg7lYe^)a_Knpi~S)hw`ok+pXLcTWi z0JR7BCnp^GeKL|0rh6UNuKk!65Ka$o*Pk`(?s?%0Ib4+4ZqzDv=1vf5TikiR7D6>n zw5Xz{xYpK3yb4tab2t>uD2F_H6g$}evO@yd%Wl#07CN3;?UweYQm-D;Who~d(l@R1dVTf#| z^OpbE;&VE@9rZpJg%Nx3!2V;2j z3ax!a=flMA`4`c)yaWl@ZGVR-=I+)f_}SPh%X~)=#j7DzPW)H&A^=%^`jxa2d&?r$ zhCdm4_Rt(F{^=C;H2m0({9ZDY>lZWnDETjD%*SY7I;e)FcV1yq;sT|}4&oBtLBqh3 zhsd5XH7_uwA?p@-S;>3H_T=EA+=78Zc-yr{lQSX za>*JpB=SGnqw&WsOdVgzfZ@p=W<+vExb?*ciZ|5p&M|GHKSx}l4@hGN2EAQr(D!C> z7l^&oM23o=NCY+$yg#WAOG&xMg1?Z2!n|U*9Y{@l8He%_39SIbTlk>>Ek}xHc3xGx zVwu+>TD(0(2^wo&f!Y#Xnzy=>u~3$ZLH0sGZ3YpH%teDpmi2`Q)9q_{Y?eM5SxqfL z*;^y({@3?wf^OX!xenmh8? 
z9xKv9)iQw;yx&1uNAiEwP7^~y*dHa zi)wxu)xk>u4vSazBR8gl@w1FIdRbtbRA=4TzvM_Zbvtm{w9tr3f6!@E7E^Wx*3Xp> z0F2jj=8hZCX)wS{6dwxWrYe1`=nyQQF-I??;?p*%W@L-+pGGF1-QI5Kqcd;`^uTsJ zoPuA9jd3&t)rp?05|2B}7y>3wyCz7UwZ|yvk4(?h5m)7hpIN~??ex};uR>Pp^skv6 z#~tDyuWwKFQVSKvg_B~&Sl>cXx%#zOvvs0gj&zDXOTzS+fH6Hboy_^x-y6VVg*Wuj zuHGSFH`Mkc&VGy_A5m05oc`zW;rJRK<0T}KKme#FcDN(?> zGr>L@CF^VAM;eAfEOA9m3KKP8h{b}WvlfqWX;lZJcXDCeD&zJ=0ly8hwq$)DmdGOc z1LpWLuTkxTCbwFrG*9txW0?143dc~HCWCpupF%K_XjgbUPNs5z#egprnM2i?uW_@^ zn5*~Ujmjt%ZQQBnu=DRs`lfrTn$LcG&*b2y8WNERLG9;rZo9YKTB|%_XuO^|+{0zJ zEO;izUc;wW=#N3fXR_ue#2G2uGsaDtTmq~NgR*^M#p;R{(PuYSA%qd# z{>q;L94pbK^a;p1zSt;f3EvFR@)HP6%lzuP=?~w@Nx}k4r+mo>jL*1Q8KQRlfqw{f zYn{&LxeSUS-=jkYMX5lM2~~VhqZaw4HM@DSfkKSx9~C|)D0^WOM|Fz%>&xaBt#S1a zVG zo%p7mHLVdvAzFW|(ul(HcyN1+XoMNztslewlTkmo=(&rY+K0}jq>KJQVZkgJ!_bHx z8XWv#A3QSd^V)wdQrdu4d|_u%4d1J(@T<~CFno^0uiN3bS}ukI%MRhI^E<)FcCErq zlLJ~bD{hQVaMiJ$CZ;7--=SSyU0wrKXF9AJ#(j~u(Y;%Lek2v#Hi?*N_ruYivnM`4 z2?M!A_{HIoJID1WZy!mA7T$h-*{fBpd02sQD)(!at?DH4QMH(ZJpKU>g||gn5&vP& z83WjoKd_OMWAe_1H=tf4OGMy)~0R8mXG47Y`3){MgiAH@iVys((3iDcB=ar=D6~j(2bxlA0*m*gH z1jG-2e$kT6@5imS71mza*~?3XpM{p_qC69*W1`PP|@WA*x&B};VA)6#&)8e^o&+-Ja628)_6=2{A> z91`3z7pSi^xkVe$x7I&iYh8D63aATi8IV!T&$h3yk6XfN=L}rdQsHU9%+uBG%;8V` z1yI4FBInz!9=b_ebJVy(G?rsKWfFX{%ccG4-`OW~VPqtpL%%@owXr z?klm)rm9FQOc}7|rK!of_E5eb!8{()SOW)7^EV-CJe5gJG|2y*FSv3$udv$4FVXBI z<3)A<+L9~Jc#a0g0Lud7WCsL4693?On+B1qiGt?#WmEM1E7^N=UD4SwKvLq54KIrL zoUkFf^(yP|3%T);h|KAB^A$Z>xMU@$uqz~E_KiR)quN+-4g_B8D`Rp)n|PC)c=9If z5|tyORi75&7430KyvW<}mzinA)Z2vTb9(6ruRB<&(@qhPxXYY7Woh`PX8j3|Uk3b; z907LV#R){A9J%=hn2F2Z0NB|+fRb;i5$PB+{E$7Zk04;X{X~Z8f}YbnS`o2~%&5S5BKE3fH<(zVp#);~9F5D~`w)xn+?c>aM_7T~L5NYH_% z_QvALTU6Hk|^%d(Mxi?^s($i|gVd4YfoP;=K`1tcz z!pDMk}?lx)9J>&;OOXd#cO8IP4i$<{#%qm0g^*R(4asx02>>sxl^)$JQmz($^lM z?;9rfnq^>2%%Q}x^Kb!E5i;Opq4VW515%te-p4$kP>{aqj>jaxG>ckN!F%u#8FmQp`bent!KXD?B=j`T~f}0$c2K z=#UEG7+T89^$^-_1FC4B>_jV`KFC!Y@80AkYKz{WEE@S<&S@4q{?hD|7H*$SV>(7~ zRuSQ~c)m9P(lO{1>w*}|il%y9re^cAYY;j-^_O+B?lfix%5hq6U0XJ)L>3zep6CN< 
zaD^w~+J%cByq@?|rfGtlQAHk~VCx^!Z=yk~qzs?bf7A{BYId>vd$cKCNVU8PMUtBH zeJz<_*q*8Xn4YFAB^gV=e?{qjsK}$dp60zLR_-&Rja#zGxHjyF@%W`zI~mck&eV(0 z?TeEcr}*gBUo3GNC2R8s^VpKbf(dTTKu9}x_;ns6#IX`M0U~O4=>0pXW}o)4IBu}P z0HF~0?EvIZ0Jp`&IQZVwS8^oM)4zg@pD*+|m%q8aN}{0jJb#pfBETI4M!y2$RJN^& zUr;sYZK7PNbIE^}y=*%$mFA!P;TZqTKGo$joYs2#%+*PwN*0({Kd#tF_~P9AwlTuu z^Ob4JgxMeDyL3yw_gYGwF@t-urf{_S7Iz8l2+E~Yw8za=#@dq8X^H-%=~a0x^&6T- zA0LYiMyeR zo~0kZ#z+MutgJuP1_VqgyyO4*{@De}w@Z($T_W?rdz4Eu+MlcUQ?6>UXJO0wkbdA$#V^(e*u3-^`8jvzYDyD0{3+atsx|441AVZ6`X>8*Hsf!) zD+!5n$5RxpdsI`*z-!~fMwrPR^Z2z%w2yJ9nM;u`#`(xpx*I0ZrLSZzXhfD27lVQJN)(^^c1)39k9=px9uIb&trr+#M}_$aQbfH zn)P`r>0k4~u(xf(1)VKm%NSR1(=Fo$7dLQ29k9Rh*zi+Wdp2!E`+552h3BGc?7c$3 zP_d^HH?B!7J#BI^In%;UWgxRFgoWNXP8DXn zr>{YtRcq+><6|SFGJmAEsSI!}c8VWb9AX1uwFsfae86@`@SdHW;D2BI;@E0$OIYW8 zn9T6xO#z7Cot!T~bjD z+^l2xRNFM1;a+o|ySbVbE9ZREY*{YkT41TZQ2T74c!_dB#0(L;ho9)r78}0dj7mNU z5Y3cInA^XjWGbO*j@6pe(^Jm2A@G;YpCf+BLRCX7Rf9cLWz$q)D!4~j-+yJHVjFOA zq+OPmzP(YB$aOV^dt2^hOO=7E`%{JNa{6cDgMZf)=Z{ChhIxBDvO$P+S8z)y{0Ca` z_dTbNUh?}c500TfW_3^1-g=B*r(NA}) z$F*KY-Euu^J!^fCZvJoP-@?|0c7~>g=Kg;=`#Tq3fF2PZZ65jeLHCjOGWJal-lgtjM{p~AjilH!1`pXzQev_iozKy<_ zzLmbMzP-Nly2ZNH>&!!?W5R{~x|n`6(_*^mcoX(Iy+g|bYu6^RQ$pkLI_AaZMbE|i zIrPQys?3VadZ1(Pyzo#!!n@cj@~gx{$HVhO`9;k6^m@j+_UiKLcB6Acul?r=a2~k) zQTv|w-u|8{G@j?o(xSb3Mr0K+kG>n}O39o2^jZ8O04oqK;N%wTv2m2S(?0H+Z(e0y zX5MO^w~sIkHT)yKG#)#iE1rjtfKZ?4O2CuXQ{aIQl{cO@z`caejW8^pJp%VKdhx_l zntwkhH)k`aC&xb*I!7XBS-{zIYme|M=Rm=8>ni6m=g@EAx&8J#NF0O#Vh72A^gsX* zHHaG&F{PaAn`111#;=}JIfZQfPX$7Ex5gu^_eIg|3r+(x4g+Hh@nIkF<1X3|qikZ&6%}F)nmL~EGkgi6kN_+WiQ`EJ ziRZ|#oxoqqre0w?w)sbf_HTA$+bYY2-fQP)#sqa&$o*$3%YTAzU&QMq#zuV%<~gA! 
zZrf3*%yHd`R^^r{#3zcs9^OT|z|=aYexH`2MJw%!6|{4DeqLZZ{il#(+u~LX%*C0J zM>#51tz#|`l+69DEPnia0?SM0{^4WTJGjilIZN}`XLbYp)#Dg4WZG^)WH-)dHr6d< zl6@`ADudwM5IWoK5; z=LQI?E9AQ4-+7PWo=3Su?U6S>@n1v5%b#hp@lTAO3vrKSM>htkkdZsw8+R&S_nrt# zEy^NQz@F$~%xqAu&23Ai$`+*+WgkmB$6T7hJF4{ND|HQ`m{DG!7Z1|D xMCX%!ylgZaeZu%cR73w~_Q%}B#KZz3xLSUJPxLMf>IjJyA3~rWW(ES{zW{eUz8nAm literal 0 HcmV?d00001 diff --git a/font/Roboto-Light.ttf b/font/Roboto-Light.ttf new file mode 100644 index 0000000000000000000000000000000000000000..e7307e72c5e7bced5d36c776d0986bf71b605f15 GIT binary patch literal 167000 zcmbrn2Y3`!)HgnNW_C9{yXn+a5=tPzk_3>Zf*>s*y>|$ZPy+-LdWS&hz4z{B2nYg# zg(5Y8CX|J^$~*%{=0zwdkg&y(!#Y?(Rt+;h+Qopwhcf*=IpK^CH# zH*NM-;Tw+Wf*7`15XAn?+og55nAtc*5F?y|(Cbq34xQe3ciOB8f;j%XAcQVX>yT7? zRmFlOf+%;zS^5sh>^JJ{@9i@M(Yr_x%+m*q&x%SqE^idX;MVxO>)=sCGN-(V*(ZqB zj)Fi=4Cyy^l;9-<<9d1c+;2$6#K9*@X36+0Q6LXGrw`1`Zr=BuQ8@omK`=$659&A2 z+3osJfjs^c-%m-$8$Nq{R^jtHIF3ut%*yU`{bXZ&4thiCW{e!rFZB7!EP?#6T@ZXK zX7fNNkE`k_j z1x@t@6Fw89$|i>(31-1t@Dl=sk&2(yW(%t4@UuFEBKBzJkKX*zzesQh&AK|oq;dsf z#YWxufq(-cpiwtP76trLF{w`$^~sArIehLae(D?k>Kmff5pcI=t>|c5v}7a1Mx@A4Gnv#D=BTm1+ z&_FQ<1TxCajPh5|=?6OfKxcv3 zMKx;$OR>TBaQ5(LNIR+DuOaQ_ex@vXj-1J34>(6B{Ja2kmlGm|B`T?$I*p}PkZRQq zII}@FXLeb9QkF5hf}e=N<~k}QMHC2v+@Z2r6=T zSsfw#%V9+h!Rmj-th<^lX~}ZAyr>1O0_m zNgI$xUP~L8_QT&DO6cP@1Mia8I?yv@EgekO_9DmnlFgL=_odBy;l01fF@XqUOKM6k z^FpDTkfTV6)iD&o;giIN=#3$IYlAniB?_NJg}6BpgM%2x2{Fc-O}n?XAeeagAX*E` z=niA7j>sZMjJ2SO=S_^XPJBXqd~%(X)MR^TSXgLmo%q-ob7)AIjDrxfB{Vh}AKOF1 z?6p&*S&NU{KXQKmV8=%jwk=$=X({QmDedy1L7&~tKC*4mAu?p-!p6-%S-0(Cz=aFe zpU-VLwjwus&VDxaG`0%9v_h;4@Kn4lN_n) z=nYFN4r7dCQ;*f+Qv?PTSY^Wy!*z#oR!0TQXObh%T2NK@eg$hmEzcpulgl}V7aJ3w z9L~pD)1$ zdbJYIwUSp)-?wjW^QrrG&$sl-p4j*8UK6tiI&VzhyJvoLJj^xq&Ysx+?Y`s25BRyJ zJfSEPLPep9kPL~ao|4L@w7S(1Uc?@w-0Rs4ewa1U2*yFMwk{%Y5W%Fu z-yDpeDxvCAMgCL}>6vsTPW>d2KS5a(*gYRYBDgr%nIf=QNDAiO5+0ji=DdQK1lLYU zB?%TLB2q|rYD$XAII_G;+L1L2_V3(G7WWuA_1xmgX;;SCk{0pBC1d zGo#5n8QuG5No9xbXun3;lDl|8F0HZbgMaq@ytHlCo64}0<6W<}X-;kh#%^9YW0Rct 
z?#_WVMzm}@bTX8+FhPDpxDTykg4{S{x6a|JCMel{-`3qU*ff@2C6!m=yLq&#m}%}S z1PNBfD>#I&E&L=9dM>z5P*7@kfGCEB1O?8Qele{=|6P6i?&(`$@)x7ThIi>uda!Mqw@5sh^TSm#jU+W|(}o_TFhoSC zAWo86LMsLcQL0XqF{v`2?xHddAt}P)jc4zq2wbaDstFpqQn)GDB1s8kC@qX4t>ttt z(lnYL`nJ;-mpVy5XAH_AW9Wh@gT~UY8k1V|T0?e~R>D%bmV89;h4#hCn3aR-Fnff+ zA#oiT8f}ZUK?B5@n+JU=*!L?vEnZ*|{3fCnQifAy~j`hgVX8?6EB1 zbplr_Av2oykjBv`b>5w^ZW4e55o(F;M5}lha*tn$Nd?4nZHx?;6s}7+$`uQ^Vu7d^i2AFgvZc$6*OKb(YSd^~y+v%TT5mCnMTCkaGsK77N(SQ$ zMm1BjR(LG2FUS=8W=t}3`<7kutMoV4qCfc4Lh&hIfvrRztHb7AgFd*tkG2LS97t>p z`dABWo?*Ze7DRXsF)FJV8zV|KYmmKmP_U&Aj?5NY2s=uZ{=R?r){oMIJGUOl+1qw( z-7fCgvUvv)`_N1DBT|o4BPL=e4d_++rTGOBUOc1s|E2#B@n1GJ1t$53=`iq1eU&>V zP-N1`^FR~9#D*a!6-XYk6j5UD0))icOv#n(;*o5cMV^G1qLz(1igOQzl`aELlo8q~ z-et>iS!d|ShD+F9zJz|fKm>|xV_?W(vle*kt{BGOWw0liXvzatCnbez9J7T)%VnIS z$e+1=w=P$60M%5ytN?C^C2b~R?@8y^<(0u6t+e)q)cVTl4ZF=*0$fcJnvBUm{ zoA>O}^xG4%KVH4=!0{v5Qkl1>bjaJdW6tQ~gKwP}T96ojZ0hj`>Bn;yytir9(zOhU zy@~m01-bTt#%QU|z5zRM^}&bjEvv&W5IyuVU`rTAQoPK*Y*haITWlDiv3%)6>Y{BH zDgFEJvt1p;tutIB#Inic<{-M5jA|-5ot2~}J(&#dhV=>q9TkM3O4*8$IG0Kc0o#ct zC;m9_XDfwR4KipU{){$Dl4A2`%ysx%3qo{MTC4?SJ%>sK{&gsq;Ap@xh)s}c$P?^g z45X;$$nsTl@s=y!K5oOAhYzlP_^WNKZFm0qP0HF%liv~_if2dCkai`)%^&IKr&g>v z`~Kz&)fzE=PRBUBOvi;VAyycqNO6@IIl#2x?v>A1lQy0}j3+=pAFdZLCFVn{zdeFbWSoB~DNqUk^=8yxjYweu$!5=>P zX3zd2%uyT2iVx<@V+Dr6Y9$IYl(_0O7~g=#;4|T|qMLgmU|5L8JuatAG^=_u`|PNS6x$W>c(4BK)@6BQ=aiH|Z{8CUU2g}LnY z_cJplucp^-xRy@InEDHu`skgH>GjheY+83B{=n?Hd-lwmvrn!!`cTzsrzao3^VNym zkE>NZH2Uxtx11~HzPFz~+_7D1Hfh(E8B^D9U@|nQEQSCPKDQs}LQVYL62t0j9Yj zEr429lMhGcewaS;-1IxosDthbBUNEitaMNIsmjMTEIRU@xV86?srP3u{&fZ<>BkH@ zo@S-XBxYB8z7~Sl1u7JU6=bXQrL+rDXuv;l(4+#vv&bA!AhPEWJY$#!|I(Mv52ZKQztnJ; zt9TfrIal~e`b=&Jnro}Xa&H908kV@p!dy>>DalAFVRVvY=}lKXsg3iHxQry_(AoXy ztXxpqvE(1A733lUa?wihR^`H*c`WLM4E9QM?F!0!5Dvp>z%DRTI(v%IG?LvqFSgW( znnU5rs|FQjhh&Ql&en^ZH6eSMI6?es_x^MJ=Z&6IEg`;o_4tHpWcm2TpS17SXj1;X zaof*)wm;+KG*CZN1plEbdzDb3b+JXj{z z7CSRMij_aXCbN}WU+0d)yRlmfDu^DRt{T>|?hwOm>^a161Av_36PP=a+DOD0!zD9Y 
z(WyZ~bc`&DK{7iCXUif^3=Rs4v)7h`2r~_-!EDWo=C4fs4~*&eZb-(5dGmt;C!ES` zlhwxVlh!3?jdw8JNw?gf2R@D1KpNiZsWgbJcPae^U46*U?`tykuM*OKZ}D;8?k$gX zt^Q_@k=t5}WLfc?!973z4I$ww_=v}g!rX#Y{P9#zp1i}PF z<>c|8CIH$iN*JYx(J_o3_}(@|cvt{Il~sdXFNK&9gaf{aBqb?=^ePDStU1LiFoK;3 zmeT5ZmjMBtUs|)4-4@N`Pj1~!aLDa>rDzpZMIR*xAO4Xue9F3izoE;?=#9q;*3(&} z-P%3x70}Bj$44hd?}{t`@sy8lizi(Bx8%>+Fhp7~D{mIo^$ggkQ&@fFg*OzbLPZdx zj>HgMDaG=B)`B1%ozTI$ND6prmLYy(LUNrTtTH(~7JL$Ia44DR$*0QDUw+>D?_4ss zWamQ?7?xXut~>kahfhBGG@P9M@!3{V<q=j_y5&f-oqvhD#NDp0?0Oe)y(N&F8<3kA zp^xGl8&@hf1}m<~jS?tvD-I+5lKAKWY>cnOCN?I?DEyU}P{l#0V8WS9#Y0;$q2j6w z(IAGtZZx5f_w1w3ml9H&h~JT*vN`3h?z;5*=o|AkjcFN2CyL^M9)q`&#E*}XB+}=H z&o=L%-_U>d?<~oy9i2$B)7>0=%xChQSInfrx7AiQ*Egke>}}@D+#7T3jXBmi6x@$w z9K}Mm+2zN%&eJ(LQj=WF;9Dk#YmB%E*S`UAYK!Yj@D})q4e>thlKfP5(l#dtU&XJQ z!^?XgF9U^|iiyYP)gK!WIrn^_67>rLl4ZB4kv=twnJT@O`}niZALQ&@wctGj5S--d z^KZ^=+;nuKCg(qK?zC3C{g`9&3NhpH2Mxk;@-?N**~gT!zy(k-Yg*XCxdKH@VnyL{ zPLB9+ool1GWTklHg6nS(@mR9CvYofw^CN)*6C!~g#=m?uy;Rq%Rf&nPozxMpIuGZj zW3i~`G28JfK4!AUG#Z=#9kTP*yEi40+ECXl^nInLv`xzbzj%HI^nm>k8?^0&~ildzf@HV=gC zwNPa4sX{S1g54U{u&PVBg5|7_t3#3LmNVpwsn!^#SA*iBWW*GCqz6!(#dd7+uL~&M zQ$o)!BqC`?NS>7Q`_+q2pIo{2P@MM}E!s{pNy^G|#J*!Iz4eKeSV&cRlRjHbUGy3P zKfZ3euoT?#L1ryY@d*iK>H`?T&rLOFhsG$)kyCsn?o1>7q1VDNWjfUa;=wSe8|Fe+ zQmt)5G_mK9C*Ko`>j_;%#vM9wd_SEbws*Z}a{PSp^0(=GSFbuGGVC%DIObFpJ`X)##{hi}Zla{LyWVC3n~%jOdAPV~!n zI_;#cMFgzfwU>wsbAB(n@bvMOqF=>%Mf76YPpt_F-%hHZ*~ZwronARljO?un_F^1| zVcU-Ll@cxtP{Jb0F#d*F9VRz_Uuj~vAZj5E#md(?@U5QNiZ5R#%<_d9hRBT$XSxOc zKXId+Xz>>9mP>B`Ld@IelZ+Y8f62skCq6BpJH+0uttQ9)i+NvEajozd&pGne?!_wV z3Te&&{}Y74ig)EIrK`vg0;+)JT!CpMu>wmHDFNO*oCz4Nk5(dAq>EB0G5otoqZL## z5MC!fE&;wK=Hx%AB4)g__?Gl!FKv*%oCJTdEtB|=D2GifZTT4r3<@|VR z?-eq5;=(DdhV9>VZPfXk6^B=3kIWlBc5~+bq7i4Ou2onJ9~oeIIcCKoB&o{5fH+FE zF47lz`IdR)nh7<`<#OtuLj&Z#rsgk>nVKV*NrbK7**2WjCNxw`?&v*3(*`?YU|^|H z2{5q2#{7`D1em&zu&9(|o4k!o{bkjU^pWY^>m>aeRC>i_UyUpNMtbB-IC}BY5$P*j zJrZNWM~GzCQpaRix(2hNT~;CP2u!8$RF32A;z%!Va$%0^{&?5@*@cmjvhzhdoRbI$ 
z?west5L0(+e8brd>eR3_HKWBvT8nDox;{)^%<7Xre}cZ?RJch=ut4+M$(ib@q`^p1 zH>D=AOr{IQ(<7DQ=)v(cZ!(#8|CU^<`07^=Re^j`AApAuY{i{|y;z0qiG2&5{|C36x8)KP=C$UoQyHz`3kBPV76*s#E&$wR)~co zX$12H)!3QX>UaCQJ04__YTZfIEK`u{ocN`y3eHGM)=6U!o07Cl5@e7!5o*$CxPD$&t+ai}8yHSeAi6 zt>@&nJgYlb39niWUv-3gEMz#8b2x%a9+qmrt91E`;T+=l@h690Qel!`V?VJM&Z2jv zmEdp|ImmScBS(k;n$E381xrD!M|^;i5Iuo;<$B!iju9sCa8bA|Bs`Qy4r9$c!P`8@ z)cBLk^&b(-qucrOCT-7t^4{TVQZ)6-K3VOHLl@@Q2fowo;H-^DbFyYl?M|khJiL_( z8`}DH&g%EOIAvsyY-U~8LE}o`ULdk*gOy-4n-;917cW(qILg%6pv5&fPqg__On!WU z`|u|=Bt0VgkAEHl-JD=yehB>YAX@^D_F8N#P9#hBPTtQljhZm#-sx|%cdi<{xk~p3 z%Xf)EF8bx}odb!4-*q7?XiqCjX9Zd^WRa|@|2&v z#(M3Vvh~oR_6tW1*w&L&ei66%`RvaROj-Z%M(ov}8K+T0Al=}cE(;tJQI6-YBCL>C z0ns84KVK%9e^QF{O%fwk@!?M|v)Pc+L@M20I~+_#c>wcOc_l@v4SuURg!&;?yTA^COAp817e>S!eXE@B3Q0I{)9Mh{KjbS4%%;m$KhCGgB0(m zXvQ!^ML5CYlLggfKuL@yQ1Zj^f3<>Wzu0>B~<+q5RZX+6U)cBH? zqkp~Crrs&Zd3Klo`cIj@Y>WXFm_vb!)0IFT#Zkw=MI_3-+PP$F1LEdbJJ3eQGN-!4 zxI<*+Nrr(UA{ShQ5+)YqiA}~@hP0!kfn_!@plI74hYsyo)$}#*+OG{5K}1TWTm5J3 zRBQwNaz@YY@2t#ahPIbJ2j5vG&TJ(vp)%t;V!K+l!aycpni`qhSF*ZC!xb4rnI?%v zt&~7t9=w9*q(hlXDF2YDE`9O_tJjc3RmCdR?X{zff<@JmnS`meI*>58hqNsGToC!9XCoCPW`*FW1R|(h&`F?uE) zU>2NJ-X*JOT^Ea1?ZuqLfD&W0N~1)Z1eMmNx|okC6c`~?g9R0FP>~Igcr$yinE4Y+s_534qb!3#;Sj_q{=i~05JofVO>|O8 zyyXUkBZ$zf{>S4;PfIq*SujoU$tKfgkKUS)E!TJS?|*$zCY|Z3A%3P7j$-FbLHu zz%Dc7BiBIl~F^|d~|K*1dChl4JW#8(XhOXMnZNsi*FXF}cwah47 ze|%gF>NoT;Tf49d;xddQKxnS`1~UBi3SDSWL8XWlOcyPsnM_Ot!>m|}Oh~xaVayH} z7spo{wJv(XII++5+7+bFEt4j=%E`fKInqioBz*^}0i;BFZW)5f%)y6e#5K9h%bY~QVA$MGHK zZGS&+=(rJ#x8Wtfif^0JfQ{kmjnzY}SV&j3#fo*1 zstjTZ5dLf|f-^|`hC?jklNm&^kY0y#IU%L)gG0~ zz_ZjbvE1?HroXj}FV(A+0Dri<9E~z^kT@@X{DJ(#~ZNbpufSAL2$fHEyIn z`ST|b;Eo%yXaUNH@n9#Lf+C1Kn2_?}-{jIWgXu$mfBJYZJ(~lSA~kSc95YmERD4$I z{#pa)E|t=n;Dmyg#;B1-DW^v};7VRIn;L>s1RphOq(1#GQ6ut%#4Vyr$drYs!MwDP zOr}c~iZjUjuHRg@$pxAsCWyg|cTu#JYfQ0VD2&urM!%eF5t{w z&|!=&Y1h;Q(%_u_-$s`niKmtv6ridL;dEJJlWW_OQ&^b>5}cN>(9oc8ankO+Km9~@?gJX^wG1j`*!{G)85_mM0*l<&(*YP@lDd8b(;N$55A?v>Ga>PkKeD&uD*oql7Ezf 
z!K?a8ko)R^;8hW-0Ue=5j!;8QE-MjyNu&8Ob;dAs4Fn6!p%yh*!xFt@g47-seGsKt z&K~cvUYD0=AN%~q2ifb+o3$Xyf-ws|wLbVEoK*ZJ?i&&te)o>``ufsX3K4oCTKg6} zqiA8YQr=BBLotRL#?l~Nq>0xsMjAt`#!^ZL+M~RR5S(!M7qNQ4z#=7F{TZqk4QP`D zhgn!`*PfD^k|4oqxr@2nW*K#_WVyHahWLxKTz`An5YhX@QSYyh9l4RSa@wTj-lVHI zi#|HoJw4sOcguH3SUQP0|J#j%Im-*y(QncjQp+s)!PG~7ig;XA;dMo-mWWwlhEvPi zY1YGGEn*d0fLpo@9gAXBHrg|$uPg~zCUsI1Qo~cC!c)VsrqMQHA-sBl7w@3*(ku^e zdw%r&9qT%6%AK{l)2d-E0@==$E?dtdI^-bKur|71blmKZ@O&VE_J0RNYk#Dx{1J>qOhq{lUXnx ztvi8R3FLGY6OpQ6xdA9Jw7w}jJDdI?Nluq^UVM&U52iM`59BmOXeP{5D!tX5&1EI4 zqY*rK*4hD)tpuO35<6AnO1w?1u^|wqND+3@nr)^V4|W?jtaDEy-n{p3_USD=%l=iEuY-Ky!8EfW7B%Ku5KT) z=&^rRw+?+<*DdcqwsZgSb-~Sfra18dI@7%2)mG&FEGQ5u?UeN6{ekEk4B~x6TCZd! z-ZI8?p<4%%BdRVmW$_})8a1Na&(OTX(fjrs^A#afMhMijg9+4_=9N*w4L^3I#8#7vAvWv5tZOoe;wT2=yE) z5o&0p4yrCaJ#mgiqW&8cr=R56ptvXGg&EwRanNc7N@va6kJ-=ss~`$tLDoda$Sn>smTSf=yiZ6tp8E)uhAC;fWYKKj+>PRB_rGL^JEUPuq0Jwp#4 z5&Nv(`uXAR2XAg!eQ)c+9<#R%8~?6@UMf_G{g6Ux6&#^O2QQHB=g!fc7tYY#*RGPT zY~^ea4~hSPm*wFtO;-ZBx5P}UR&O2#wuLdlm*c}2m(T$Qn+u2{hH(_=EAzJj1j4KZ zfga1E1o<-9-o)Q8sC-ajP=lb>K|O=ay;#bug6CVT6g%2;2$rc;sVoX>Osj+a{IXic0S*j$DjhyzQ_)#@`#<+na-fh^ack>qJ!87t_ z(=$E0v~Q5Od+B6Wft@2Pmzqc$gm|(v@HfFimXf9=ERvZ^rc1jeFVqa7qYK02J-v)x zLva>R5ld|C&UUqy63KK@9u?2BFc=c~2e{}DKGqgq6E-UK8#H9>s?VxW*<%_0810r! 
zZXGnq)O2fs%_BU_V3hG3Dizgww~9$hl=>sPco zsuww;tOc)mq!*gdBX3c0Vk)FD$|H;>=ty2d3KbL*79JW4n-m|Lz;F_@O6*`&@LC-| zLJy|`Fh`O`t?q7wLcM;ZCu!*u9x@VfL7g49Bx0f%dmsuZB6{n z(cM=ee&-I+O9cfaS#gk>g$4BLQ@TrPo-=7yld_qsn}%ECmUe5|qhqs9Z=+6zK&vzs zpP4pe^_NkI%u3#Q)0?hm8N!2^5udqMh{H{d`$4wzrO(Cjm=$j!L};W)pyC6aYS!UZ{4DS?0C1xJF!OItKgR zMe)+v_W-Qy08IvYj4=pt=`hcz9q2!k1Rg*S0G#%Gp!fzb_e2TdA9B8<$5rhHK^Gb; zKMoY5#dWSTq{*=MXW_H4ahq;$XDrGcJm}rjYoM^XxJX<8 z3gd+dN)*qYs0hSBVHuEH1{5l!3?nHQN#qD+Bq5_6AkLyA&WCW45#p$%dpD#K$1N2q z@xr7sfMcEyF#@~3<&0TY*8IW>x>(r8ZGO>i=G(b_pU&-D_Z_-n!qla$-`cuy`m}As zhIZ`I>&n}52eCziMlD;`@0*)FxNh%=s>=rFP8rgued{+CG5O6!Hk;*1BxVy?{+}t) z!a!u|5N$&GK|)We#CuZh$i2U|x8gAG>x1YYoX4~k{oIY}1bBxNSgo4K|^cBZBO&7)l=$n{+_3y+P> zlEmV(YY$FB@#R;H(heA2G?1h(Y7Hb^-6xN`86=IztObUo5cRi#$!a3qCho~t`OSFZ z^WdK7m34E$&5{@GaNccV4rwDrp{gWSk-QnPs(f(wDgvX?`BqH1K>`_)*p|pzSQEv| zu{CSHm6XIN>`&W}d}J2_gq{c;c;b>;Jb`09Eg^Bl586CYka`tHnQNM%X$dcoVbu#L zn6j8Xpa!z$?>R&!QI(wpRb{3=_uH{?uBF@PF5N0L30>JUqwVXd^%K3OfS$?X8d79> z4^gRC^^<9f5;ZKm$A|Tu8l*PXW8@ztx;K2>ieiU zY@~6t-doN3;3FRA#)bekj}d03C&F{c8Om0Kdo_4k{ozH=DOkF!z_E0hk}*1cz}T$w zW95uBr%r9`z2?J{n@nS7&&=*KcGlc+%<7I6wn8;IV0D)Tq3?+-BM3q8_XX1g(?KDD zoGtlFT#e`DBuwxk7BNcv2s33>rxJLU;G_*`KzYh4=l}?j7%5BiO8w+{y zR%8@Kb! 
z`2!;_%xRw4jHGFp8Jnem>5*8QfrwBpRDyUvKicFRX16WWd?mvQ;0Ia|P_d~s>j?Ac z5D{Anx=4%SqkhQ;h3!5lY}ciASxJH3q6P79V*N);ti1+yjWweNN2A%s66w5ZbM60e zsWe*Z>uMP;wsE~*R{ZeA0ntB}%%UiY9_;#duxl$>A~nP+>Lm=6&PrFHcj~g8R8Vuy z#Tv!+vczNiKHlsN;0C5Un3rxUQY<`t$y_#HIHIY9;4hrjwvt}(IrpTI&ZJ8k-9rYa z(Y18lAygRmZb!HB2f8ecWYFc%<&#Pd$S+La@lKyAs8o>E>LldJu!ja7ORJpzVAi1Yc}Gy;yZheq;X_u;CQiS1#q0^EEn=^_YX}z_l!pyaY-UwoS(6Nza41rph%j3OB+89313kewO0mW#pe>_W{^z5g-Jl1v zvq{qpr{7;?a?ql|Cv%Q{C({#nJ>0cuABor}Go8N*wNow3UeM(2lvv(XE1>D{zjChS zh?PK7D8!_oyocjTOej~t2uA7p*m=U5hY>?V0^B(&;0s*r@d3NZu+8mX-f-h zMh&V^XKMe3qi4@))AwG-UK8jQh8{YV{Azv|@yBZLd>aer6{*SVZkbLhVv_xuTjKam zCYo;I8E@@CGjE&&eAzQ=ELG%?&flz-FCeaKNQWyS{_F##ocgr}{K?TEsUXI~CAPp5 zJcmjZ^-E9?y5I)GwKY9oLVb>-F4i9kiL`>cl+LP@@nHy+}MmEqA z5~da!)rps%E;!eq!Gi^RKKS}F379f@`pyZz9GO&5xxvY-PrnmgzNhyeTadJ*`0pOa ztCtA#_Ir~V()5{y!D7yqkj#GlGxjXra7gMpbNTe4{)-ET4;wi8>Fr?^KKp)rmze{5 zHX(`c4h`uyN?QHt?#Q)=Ap0vIQIY1VsAWrJIXJabJHYCQar2%-Rjs^1i^wVgWq7Si zK#~LKQ&&aB@T;DSa6OE>3xx$ku9#o}B%xR=EIgh^3L$0)5=z*4h(#9EAYrmp?RWF2 z3zMgRp8NRRzyq&U-Tj^Y%DgKHf9(3$j&#exky{RR-!y39d#3OgG=0RTl7%_n&;5Y} zw0X05_ky>l4tsM*i@b?%J0I2S(D(YWA%_N`7F7~j2-oEr@-n>g!&>j4Sk$$Sm>w`J z9<3O)i0wx_Nn3_zeHl8sRqk##2M0#{6@q?&rEiC5udvEl&`(2cHtmI6!&z4P(e;sv z;@ntnW)^9o!a0|jJtIIa3EfJv{5Wjq*F`d-3^Onze!9&f>ms&hr7QNm% zx5}1_&3|7#;=_hg*=ofXeTN)r?0gvW?zmSQMI%oN33PxjM^(H1iKytvWIYyXA!~;Sp-G)XVpte zAb(;ZdDIUh1_{-qOByiMq?>ifB{uL#z^sbQ$g(O;$SHRCvLhmoggCF!l&2k33p81+ zCiaoW_+=(ALu%d?!C+0<%% zln}lL2J$9~HH1agkWnZMv_@LtI${m7l2{L3cOZ6v{vi`;8sVhqG%bdV&A(ux5s*iK zkEi$Ddz8Sovb;X;IAn`a{&LBZ9P}N|n}^QB9A`7BiSxAFzi%;RO^CACm$an{Y3mwu zkJ;DVc!)yQ)L7m1vbEvH(oUkXQ5D60wH5e*{mM`1ua4ZXI1Tx7!<3go3A4z+6KZk` zN1CU=J1a~yH^7s=+Eei%?w|?Vi3Q_BwgIuK?eiv1oK21|rBPzgYVw3y<@z*=+;cs3 z-DH%ZctN5Z+4NE=!LLm!s= zs02{KqB)ji_}Zi)`>56p5n64 z{d&#FltM~3#Qh(sg2v`FfD*RHAW02nH8m;)%?=&PE-cKJdcM`;EoX^%9Zw=_G!+ih zR`M_4NC+YjW*$XTNibkMu1_^Cl}5K%?!-{m!6}6n(!)L_KwwRxN-%!{Ri4Vnl;mW> z=8o9pU$!o;pXy&drE4GOC+LW6h*pUg`wD{xTc?iiEZ;A#hz7+-`8!7M2GkRZ_-Oy`QKOtM*t%f)@R5b> 
z0}~FPbDcs|pA&|&tlNeegwYTnGvGQ+zcX-Q^(X-)NR8(O!95Ftd#2wa2z# zJZV=YmQJt)$f*5 zzgwwDsiM9DPS9gpr1_IP%;u33E*>1gYW4AIB^5`z`Xb0EbEpPpdr!PGf6?7W4ZfIs z=0}%&cJ1}Cb!I)A`%V3ZcTgMoclOz>o6nr1r_ah8rymU#r|uT3Z*b0CFl+MCfA93I zaO}?s@62DEJAFPYmOS(P`PJ}G{`uz$IeTGK#ieY=Oehm^5bEmv1v|pH%$VrZucl5) zMG50G?2}>#Ix8)Yx9!@ttVs)N?bQAm^2r^Cg8TYU9$nl7GMroTt0@XH914UyNDcGhbXiTM<-i_*cj{`7>y&AOG~I7 zg#NCOAn_*sg?=bkp?^Pl^j^Y#Y%dc>+O45ax1FWF&W_Fy@(!s!;Z5R0s%7__O;^&7 za?j9@9YaadzsWz-zo1b{&d^rkUT`f+$WlUdwa7d>rg6>C$`Xk{QlzF{c=U=vuSl!D zddPuc=qi_(Z9oDuu!yD7bhrds1~qs(1;g7EQauw9Y8Sgc+mc^5HK0<1u6+v&yS=lZ zNsGYhP5P%llWrAXg`ODzB`MeHKVyIQcTgve&IQcUOw7`5a|KwI1fjPg^WIm6nd96O z#dWx*&`S-B)t{SP`0e4IP$WZ6I_<#Vv!NS7jaRUJ8F(*p7|T%GS%m%NKJPzSpZ6~6 zTuAbWw0q8%t%eU9IVG=y^zQFPtlgV6C9`(#9i;lF+s=~OyN2{J%~-YVROp5;{1Ca( zHGR9glDh$2Zk7*1n^wXYL)7eM2n)36q+XjPUY5dqVYAFJq!|VIx-l9nOxPH8UYoYo zJtI?nHvI}`ZW{9fuFvY;X2{TCX%hT^+w`vqpKagr*@dm}0a&T?V$0we%hm?XDyRuBBAc7`EP4( z_K;y)cfHh~d&7Iqy0gbEuC_0?>4H|&f6BMzb)a>aV%5>gEttl0L!GK7WL!^|g5S_! zE^(~D%R{(gR^LT4kIuYdr9i4w(5@>F4Mi+9S-w3ZFH6)_iB5{N%Imf-HaRyM%0*|H z+gBOP)_flNI;z4b`(w8Zy{6>6*64o~oOubr*htmQi|VENSF78#4_Y-c_Z(7}<1(zl zo~V&6f^B0RTj{Up*fLy;$FrGy2Jp;@1Ca!%MSZDBsH#PgOw^lhXjG#%VIehlNFfpnRz>B;`K>qe-vG3){kGCY@bcUsmFp{Q+La4=l-?4?#jRO{=MC= z!qLCpNbRt|DV^V30j32rzFB@KUNk*NbRGMCA%pJm`~i`nSw#K_0BT7(5@?CEBw3_h ziezEa3!a6>!h23mwTl-=?%tL8du#cb&-?HDm*Bkl^0(qi(-Ri8XXjM{WztJc&mRdS zk%$K3G=j|XdkkcIu}x%C?V;FZI@wNk?jQbZ=U<1LYJBWRi;3Bn!< zy7o-NxtE*5Nj=m1!0kaS_gjEnNwiv+(##5v8ZJ+a=lwj1roo=5{W$q3`P*CzyH2FGh*k6HgD9f)%2~T zT5p=hwi_{`U0Oy)TJ1MpwN~F@7aqJyPdJJvZDa9W6dfRs0bJN=mt<^EQV`8^LoSqp4qu((*_Nz)Og)=x$n3Ujp|B0vtz2pyiu3o?rgb8@-~$KZ(4*VETE+X zinu-m_f=_HW~$*~1Fj&{#mQxqT9CuFs5xA|6MU>VaWVa6V#>UqGEF7ei6n_$!<9DB zd~&5^59TyPcvDxEz;?wCbx-gSL9$9wlGKYi>;8z5d#05QJ=k-Iop;?KXk6 zW8TocN#m|vC--<;Z3p|Y;X54~yw)zQL1g91kx^BvLh7COUXfl&UX8udyiC2I_js36 z0D_Gk2C#lkT`m@sq9~f_v{W^VZ*k>!X_wj4GH*kVPDH*w$&ygBdhIs7)86P{X(Wiw zB>4gT%=9~-_cu5n8o$H`zT%FsPW2=>cx2PKI zEDiktZRvI>0X4h`Mow_~=SU;?)+E>wL&P}*Qy-iSO$kCx^?U|sq@7P6n^DaCRuVWT 
zw)bN@{CSI=UtAW`X;bMgEyH&9!~ank2`@<)4@~m_o{a=u6)P;&?bicE@qgO22fYnk zevzTyVPch}27%wLHSjiU$;018A+190UKbhbwexr#Th;b$px zG+f}ZqT7q&AYPl4Qrs>Cb7@DZXqfsc45lG0G7K?VeOMYXX{c1F8emzSD*_)F=T{>6 zPeD8GrC~M8yiEaHB-KfYL!W}pmV$IJqVeH2vso&)m-sASLjT&IyPy8ObSd%OoBP$D z&z;{ryLIz#`I$8LFM8##`O@jrc(rEr^ZC*VddIm*ene(Ir0;zEi!Ak(**+CH7&DW) z2mzSV-Dkp>Zcs~xbUts}*G8{ZTLF*qS_8qD-v)5RV8hsCpmPTy=@5a%U*iRJ4U+Z#DeTfDQFO3 z`|gYIAcDdnPtj`I2#a&^9Wq(hh4|Z={o?%-C%-+gnbsmvGgr;|Ys1Fpv)4=|(KLAi z_N;$6{V}OXzkNFDS9&^SLV7>a;{4bTNc4tH^xn~|&*+ifyvJf>$s?KNx>+oAiW05H zLZew5i#j^)uxp-}!Bx_Pkgu20U^Lc*Zo3}EH-rhO62_lc2t(&3vE)g#!7*kVpJ<^w z3_l@(ZB2)UJa;ZA9;j2gP6mcnuvMwowpYN29}a)*L++jWJ|n=DyM5K2UWuEAuh=cu zcV0j2)hoQmuruh%YD~OIt;@$;XD)sJlx&_n?;lvO0y-LdgtFLKRUtzW)V`837_b|B z8_XGZrI|-qMPPltjw(s|IuXSjUxf`YQ2h|=OK&+4&^1~PchRG}>XB9bp>tqe}v z+!YO||4p_>iSHc!Zb+ZGEB?AX=tzU?ksp(#o2O6OU;S{#7R9l8X*W{M^y$sLgLYI; zIJ@x7_e~p6lj+E)QzPiCiR+d5&Py#i?OUE-K$wp$B+`D;C-AX-H0xoQMXpA*`Q?g^ zMdiKCEMtikgror5E(*J8NzHTV=xjPFm%J7h0U!GZ`Ud$>F6TVlts6rHJz%vX%}s=BL)8cv=|+NnoXXRle_j=zoJjT9_2ti7yPb z=^X*ERAlTWzTX*8YJ38?`_+YQTc`TuEzF!VXHn626YeayN&mXEoMdE~&yc2{^xWKQ z#}$)z&Z;+tyxwDNdZh;RhqL8ZKbc8lpGFiC|8ibc7r2_Ac=YR;uz`msT7Nyo_&OYM zg`ZKm7B4JOLUa|id|hZ9)l>k|i*amdAxVXaAq_$RNk)cD!IP2}0;4%4BtHa3vl~mU zmcfHQ4vQuL7AZpoDL(<$mu|meN1P!js6@rF<;J{Hbt`#RQ>ZmbMj48MQ&*xwTl&&=CK>;C=4B>rE%5(eLvr*Ly|=x{#b|OQj7z zWcL{rXRp|yY4aTk_4*A@s?(xn?G_XMh0Goq3_3SL`dOaJhTR|hbo=OWdDLdfup)Rd z2@53a^J>Ub@uV;WR#$EZ;@!C^EMzQVW@m`MdCbp8-y_*m@}9Lo9%}#T^Vf?H$Za;Q zbGbrU{z($%W3Bi=|FF8Qia@v$=N}dWGtqd|BB$KYX_iF;AxZ$>;SP~0rB47uxB;gE zAWmrkeFDVMh2I7|#*vbbPn0C~@sxmkyw92=b-5On0MPC5#Fn8>FJn~dJz$d6d%$3q zu{x*}4K=YiKMLMbWuKqF{QRNx@a7l4h|Yh+#Rus5edxO#wSU9v{SrX^73|CQ7OC_qo&t2~~`-?sz2wq`cj+8tXqj}`dy?d--4zw_w-iH8LhAw$ht}|#JRp{S+cx*{i@ZxA`;sUPNaX+owLZ6 zUV(|7l1$O2-`Biee6?kLd4O-b(c&l0Idb{O(LLop*x3`^r*ww24ib@wDig_FKgYzc zDiB0fO#*l#gjK&*HQ-vst9n&NDE3v?Ei?t|h9As?nlrJ9aA=A57>GAek)mpcvdnY#Czkd9@zizF} zG9_1;H2n7=M-R;2*T2@A_C^cq)K6%ZR;5MBR@Btq}lJ-c7No)!jg7S_iU0SUHKB 
z!YD08^9JffVq1^Vv!hJ+;i<@3Nvp%EWt#;j&&EFDl=ZD9?yMMd*o0H(}NIIAAYZ8sTg`n#MHqgr5 z2qr^Jy0nbEBA4Zg8uaPC1^`s!PB$K75^6QMl{FI>=xTLmII-d3@5XGJwc#jPvaHX5 z`fpT7dwtO3XJfJ3$~9&Bx<4*Q)5HDadUtHNHojijmL2OPht(n8*Q>4jXP$;2G9Vi^ zV2HL#d39&C@+>x~%7`KLrzr?ln4=d9L~Hd94#c87lZNQhXgqFPQ%A5zb4ZE82NinB z8T4%C#Oyosk@_XRUy4c2A!oO4I(K&4<}=cbrr#}#Uj7H$7$@Qv66d;p=J{Vm;m3Y^ zd6t9Os&Lpe?hPbu5FSLCK&xIA*Suc5PaU(#6Fd1sM5t}ji3~1R$c?6WP8I`KxJax__ zlev7sh*7GAn_@1df6mKk+LFd65 zNH=5)H4VYl1y8rLxzQZza$-W)mnh5vni_{Ku%qD3jZiA9-dtrCEq;Z6&L@U>b8%%^*_tCR z$$*Q{jpu7-!(g!^BhF?etI&wL;%f{QdpfACB(_v_tf|TIOv9uw;B7-rhRGzxZMMkc zn)eoOUijpvY_fUDiY3x7^y!|NtM_$i;mW*l#B}x@v~zB&VE+2XgU2^WbCDKoE7-d8 z$57&X({IJmqEmFYIh>?#zZ66Uk+AWIhm~AIG-8J7IPhajrK}r&Go8-oQbP>f&(aP` z)ezLYZU#}QsgxC1AO+)b@5*T5ULRXKfHkc4DmXk9t4R)OSf@fnVoHlx`cn@5A+C9H zVnl`7jk=9Wx)T=hRqc_c&EIfr?Xv%)*KoG|-`01$WZo zGtE%}R0FfOaDLD?kP1j3#V15F)u9(ybETbKPwo4=UE1?~r@F2({dPv~=j_s7oH;Ic z78lp7nbM8Re^UCT(@Cn(r&ou#zWnCFk9V*9arQVVLw`Pb<`0H-QcC_ZZGwkX4tqX? z@Lk4UA&(r>X=qdnH845TkI__?chw>jhJb?3Bs);sqDS?rp5!_t8eu!!@r7rg)fPF` zJAo~cSGz8>KlIjb#60%aaptu z@np!U!zRC;3@x{KY|CZ`5n`{^&T*vGZS zaCe&%xVz1Ees`O1p4UjPrQY5KQfe0jFYKoP?;!@=vOa3PGvQAlD_-&2U~C}lg(u4>B*n`fT`U{ zPtrRxMC?u-g^>8`~gPH?!gg^dd}qvQS01ckF+Qt&*^B2gx?EJ zJ1CV|Hhj&vDMaM4GX32vm3-v~B%!B8j(LD%6pOge5G;_g$DEno@E zB0^JcPT)>vb#utDWnX1c+(o7=V*5GLA(Fo5)bZ_-P2A*KFz|y(AAUzzT_ji%>sqsW z(cE1kU!#abe}Zv`;;uRRecrgq)`n;ZbZSF<$sbWI8_bK*)ttz&T=0-4=2A2lzc^}Y@uD5wMEWtET>wjyjVt@3 zKBBkcqwc+m)xdgE*rXe@K&hI>1nO0o=*L6#=wZ?Io-s|WGqQY^)m}6%_NDVHN4f&Y z&=p^er2qwy(g+--@A>fPPH^h!fMb(C`Wmwu=~}aE$^31YRZ(b8H<{*w;sDHRFC~ck znXC_!W&hpV!oRZcPEWDB=l~O_)|9IT1_5lHLx)g38p%~*WJ@y|Qpy&q{M$N`Q+y<+ z-^h6ZWu;2ameOmkOCkPK(rGH`2ey8eMNSfLF#s9QZ)qRuq%D~&hGQ>QcCVRGVXn$p zu30o8G!Sg41K0Acd@>9ZUs`Nu_2&6HD=Lz$O!?x$Fjg6NR^>qt*1CY(tOoPY2j+v- z^v5E?foK|QT~Ler<)Pbi_Ryg9Vma!)f?ROq>?aqNQ*W{SI`Ocp75{&NodHLPOZorn zu0V_q`)11nVMD42la&PC!Kn_CM_{x;mRdt>S)$J(n0=W6R1)};BLTxmV8g&eVOm0; z1lSE-DWqb<-7fI$a$&p=5_O)sO`I_%)#bw;3T^|5+9;@5*7>DvLck;NDJjy8v+r-b 
zk~8`H*}oFMMcJf0{r>6FC-3Hbwd>>;AD;c7@W_X9op~E~Ps@6L$n}p#9<17${&w>I z$dkEwhj%Spu}mCB7Gm5duzH?gR=u(B5uYK$qAlGWgDoV(M3yFU)!pwDZ+?pfkdGY$ zY?v3jO&*t%XAPKEYJ~izz z{mSioHS{m|@w+jzEuza}lM*#&fz5W%QXW=Dl_B>|HA;XlZ!t$zpjt77IBFH0aWBuy zigo*yBBjTsEDSfcL&1P07CXSWvzqf9q|Ev^m-dsj}wr?Z8%Zum* zXA5G!b(x4hggS3sD-nE%bQGLFM7NkmgY&+u7RtS6gu8pqAQT1@qG@u2)i8Uk2G@Lt znAOPl9#eeMZ8biCitEFqa6hDQ4WI3||1a-fqlqL+kXV^vE*$F!y`us~LjEx0yy}#B zmZ>&*P44BB|NIK`dEn%!i%j^$mG!>(hHRqg1zYNgFkOf=L$4})^st<4of*c=VXG!^ zhUKWOikGwgj0Z4H43F@CQd@j0M{Jt&f>l*8%gL=LF37G;w^$nBWuVYWa6qK~A1<$D zRR4Q)P;}x^!{a#_s4b0JGKoPlv z$c?8#C%{VnuLfO$LFCQlMuYBy+s|nBJc>r(NVVq<*Id<}w{*=awdWE;rzTeW;fv^2 zgfLPG<~0KhETeu7k6POUCs&(GL&<;v!fDw@Y_jUEp3ti~R@I?{aF}b)2(Vx^$jR_$ zvS$!E>IkG}fSz?<#w4yQAAXOBh0#$Guh>TnygCv1{ z0x{NnyjGIf8&d-F@rFLr4K!--`{4Z;qMB+m2kW3!h%0qU6tg;eE9`PPsHO7RjrMJ0$op_KCE zTL2R<90(clre31V&|^Q|CcrYMh`NUr|3hy?JdGkb2rh(qi0v;jVeB^xt|77@{Jbe=CPt>R1m0J90Dl7AAKh~ZtiV5!6qVw=3L8X2;(Rq;S zU-Hlo_dhJP z6Z;IAeB~sXzVw{AXwgh{D@qGWtzM!2B-dHIphH#}{{?-r*JAeBs6mN6h76QH9MF^a z(`Kk`^|5br2CKe7x8nTj3|6VW^QK)~EnUg4g0BF^)KpK&Z8#MD$v)KhFziE`WnDxQ z(ww&S@7Jl`WHWj2GGAzmkj5xi(CkAjz+@jLB-Dag2nyy_q90+nBuH0!@wmM=3JE4wx zj}3UwzK9K2td3wOSF%6UBJ?L#5{s)*>>K*iYr+!?evvlue43d`fFYlTLto#aF1yKf zV6qUssh^Bej^DtmpPO1Ps~l~pTm#%F?aEP>O}XrHV8e!rGQ}1_Ni|Bz0PdYYhLJfl z#ce8*JO35k>Y^i&s#Qx0Tr_FS*0lROj^y?0m3QapfnV)=CeB{HVA0mY&!p0cL(+QA zbZl7s_Qc5A=N9cexHxg@=mGEcLzX_y76CKq8=S4#I@CAI)lvO{={O6mLo1!2;>B+u zH{I5uG{A0mevpPXa_CH-j%OUBvTqn5n;H7O(sm|?D3VqsAozblUpXn>Fx=j zwDTaw;n9Hl17vw*djP~n?*Hok0H?O4r*C2Nm#aFd)C29DFZwoa_tfEcf_VKK~%`%@GAq;KtNGruB@rwpqamC~bFAcxl#b?EoM zj2I${r}M=k1U2woOFV~KcMWPL2FqciuSyq&d{rj0)0e0SyuGMm(QZ8X4SY$i!y_vv zPJGJtF0rfqx~SdK)^3tk;1KQkv!r?XW3Y-syzvo+Y3t!Xq%XllF*^4*t1MpLT+Dfa zP`t9d=;XYzy&wwOdC_5dHNaEKdM`+US0zw$%SZag?mTR_i!JP~;|pOw7*=M^)7Dq^ z%Ei1kPh0y5DM9Dl+_xcZ1q7^A{pRLt`nS7D1i?Fl^AUXA!3QpqGDX4kt?D?tI$fF| zUCpm7T}_5uO;!6@dV?ZE5SOYhOjfKlcxPAe^>EJJD?>88YBHS((wwd8c_4ZufUnYB z91pH#oOp;}fWdKvTuBr!J7USk=83P68}L!VEuiO!K=&16(C&s}IizAlk)okBNlCqQ 
zXmG2v*2DMYd9l0$!`@D7o3iVow>)m`{Ny(p^q;pTpDml;r$L?m^Hz%f2&LADW#6kI zaq3V1H2Pb;8>=>FhpRH_helsNubh80=KG7E-K`)sukeiXDD}A93NZH(>Qbj^z9ADG znmCACRvHk;@v_@s3!%+$ui^QZ!ImmXUqbBlV2yx1!$J4!@LRq@E0d4N#Fw~_NI!EO zqm-iDftiWq(O`2%L%7LlKw)WVoqfs7>!;*2ar*lA=GfWUS|5GN%Bgo&&z7r+#aw*Z za-Q$m=rhoNlasjqz{I3=5;t-{;2sA6Gx`ilv+wQfrv3TXIsl|E^(iOo)Z-q_@!Q3P zx7_N83Er>&`Pcajyk6xh2xNEc>|6U3ui?)x6|O=%3j93uB)Ajh1GuaC3YcaQAS$HW ztEN@y2mk-uR{%Z9)rgK_UVxSs=89x_><#;Y-q#PQUDWM(P;2ty>`debq;*}Dy!IpM zs+y*jnlf_ObWzifEgkr|X`TYB<|zOjqaQx5CQpHWf~Mi7x`DvaT-P?xv;Tka6nJ3$ zFL?^W+*?S~)-OH*>&)Rhz*c-+J8%-TMUpns9oH^i>nG4xuP^w2^Ao(Z#(%HxUuy$( zrXt6PCTE2Ta}=AwOYm>%B+C#clz|EV#}T7S89sm1Q6^S7UIqdvwM4&ylk|bq4FSqQFG=Kqtvo7G)F#q+=XL2rX_RRn0Jp2J)7^%#^lXbei*ZL z;<7_lHHba*Iv_pB9+|kMpZf4l+cnQ-Uf(r&&F$k!hrXpVfRCUQ_DVHq>y%4O5kb+93o2e`drJG7Sv{lZ{V%ZGA@AYgSp30474d|8Q zc*U5sNo5npsscp)!wSu7Kp?z6ig4XS-(tAWz?|U5!Z1E@;#V`Sv!ZB-a7q2kTfLB$ z#&)^eBD7ud%Oo}Lc`!9ZD0xT4m@nsLJ_b(fiXhUQ5kA{V^9~eFVALbTCSG9D%-p(a z3GUa$Py(ZZrdTDei9l^L#Ko~GlGQH%KrQM976hgM??8@Sb*TSLW46?cE4k|L>BCre z_2PtVQrlRr`WE}R%(jt>9qQ4(<3)A^B|EI(M|3KpuFz5NZE3yt1xJJ?#!QE(ySF&y za~BX*Lc~VEi&LB~QKcj31f$Gd&qND%d?DOvF#H@iB4c*|L=lla6h$5;Ixe1@fvBXh z-v~D~D1V$9k6^&H*iyZq4ddLfuztN_l^S>JpFV?TavnUG#^U8Gs)HtT=6~;MJR+4^ zx{wBR%2$%lo+SxJ)d@E^tcWFX2 zSsmhP5(|qah_hLrZu1uHXVMk5k^0TglFJ5h{-idXkTSr|uv^%S9&51M63|wsD@>bT z2&wGn^3!oGSl_M5BcR)G=B+bdyC!`%T|a)!YNPRSVt;-rBRhNe~=qhS(I+9lsO z*)O{Ij%20DLTI+<(p87rh8qmWS*@%kOEblJY_Hl*-o1EK41YR#W0$oT0=G;0I>HLWZTnqZ@`ukJXU3#d)^jUJWo98Y&L(>+BxFK;8q29f!p7sNebm!A-hb= zwL(6PuAB00ly;@)bDY*6=x0FD1EXps7IP)n5DugK75yv_mox4QcIV6KRE4@W&>DLy z)VReY^(afQ=l1#PfZF+!Pe8qR96x2zf()ptv%9SyycsFSNz|R&c}(@3Id;Tsi6qYD zf?qu5VJ1;IVTmH~n2KhiN1}!$QpYxa68eL1yb`W!N+Ea*K%*SCSx!e%4h{W*2^zXGq7*C5p-~Ka zW3enM?P^zyJT%1`P|=sGxQiiuN9Jk>Kx9ldM0$czD?u%g0V$Lq?W44Eoo@MlZ}ycMV>z&9!VW7J+j)Ok|AAT4 z{Q)a_bx+osyyMB|Zp)^Wn^!*P3ebOh0{ zUn#0}A3&9;fe7H_QHt>wuoa%RgnsvE6M|0n(yGVlcXmw@1JlG3lbkh>63@O?qv?+) z3^Wr@V_hX(krf~K9F6PJPv^f`mo~5p>oWL1XncxcRw#Qt2&kN&TxCj$2%? 
zZMW;VGsVGbR7QquhAlNK8et~i zI_Udlq|gSkx&Q<92e9)Cw?r5)qsN*la=JJ`?E#_ogyT(lLKCs02}9(m+1EZl(O30{ zb`(tb!?9(Inj&4arXeSx2Bs&*#?>TjiPbuPqE0?mGiIUzjA$KabXKi|Mss5{6!|2f zFk)hHF&ff}R63Nt4aTQIsdGe|Hmnqnh4O?StdZxafmVPuLTkj9s@U84cXo8PdYSr5{pnDK5DwLciYOza~CeE z>$vjysc92gR_-+;xA=$=7=nIEjD?$h zF|oO^F%4p%D6Nm#ha06`3^^BLvgm#`?knqK5aKd~V3Z;}cbF7xL+Od5s?AY7&ruy< ztR7lDwmQIAp=xzH0z_;o5*S@jDhv`E;Tfj&N+WQ(p(x&5vq1fdg?@T(SknD}uGCoH zvwfApGv8yO%i8Dcnl>4A^A9u-N@Y6tXDMr@znxXC?uYYs%b5uS%XW)d2{_j5-C$7f z6v-bDe9o*s@P6x^vwTr|83`UGYhFv)<^apURiB&7rHhVrIteO%TGv@WV4WY5miGA_ z_XFhG`StaKo9C9_V?0u_@rn{lFK78><9SlT4JFM;#xs^)5sxy-qL+HqqH7j<3-De8 zzGyDMyAAlt_x{~8E5E3ePHnR&kp0-*q1Mzmt9rQJoOW`M>O>F zD zp3jXf*??{ymn^`OIYy-$oh1%8yUTD(hjbToCc)m*mdVJo$D zR`U9U)jg%ITXoh;{L<$vZ)U4|ZL2-2aOmCxao-P0?p@d)rJ>K}Sw4m~+d@J66d5ihKGi-8kJ`a4P!M*w~tlr zW7VJ;1@FEq9iBgd4f8HD??o?&ORNRae3>D$!xw824H@aCptZ>{$!%a#1DFRNG>Ekr z>>3apuSayXf8mQ^N_2%!$_;H>FTyuCqEiL+Vn20SCM!OBwm52N|G>UQM&{3lgc3m` zqvU%YwSf&4Fsrd}mK3%BA(O*29D>L8NGF310!tF5Ja7jZQbbU%OS*|M;X9g4DSx>B zNCAOaiPr6W+PB~vgFJ6M=^Fa3~}-&NF}9*jv@-+KMZVuHtx3sk@RY3CF5y8qa*fvo%= z*j@L6Okn#_Xw)%64@J`YO{1fTK26Q!XyOp3-Qf+;GHg^JgKJKdLDd40pbXETWT8Hd z+RJDo%(Z4zv{Mlcr+CI2-=YT^R|R5=-5(rFJ9Yo3<7vkph-Kz&-8wIA_O|V_(!?q^ zS;fGSBLmgzKa{$qUh^M4CXmJapf(A5np z)ukE=#ao*l4UY*#8?FspC&x7govZ1{j4~~)`huEyj+ziad5}RlRSNqflUTqKYQ*N$HauC==4hDbXc+5gmgi_jvVqiY z-fc(ojm1-qdI0I(E^m@vB~2Y<{LqdA(S1Oas#5}k7)KsxD)JAxm)RA#&Z!5Rc<Qd)py#JzoVn|$ zWpgyib2N!{w9Ip~ds#PIdw>E@9eQ^ToY+XyJ~Xto1dsZ zpSjh$fB)V$KVkl7ZYAwlr2e#Y*XDUoU(8;&HJg=Lx{D3z(ynsEmeRw=( zEIEEiNzgL)-g-QN_qMz+SFvjL2JD_F7u^5vy|`q}#N?sAiY76Y$J{YPN9OYM!w7x> zq5tI{)nMqt;{@NR_;Ads0H^y89^d>3@u{fh!aMz+Jiotn>^O2HS(n`Z3t?b>l;3^UN&r@xc?n{BZIubusHvf{kJk ztBKvKmMrW`t*ut;KSODnu6Ac6fa1iN zX~*u4zA{U>H+s|LHAnWZ>ZC4Wy?j~B!$>rpxv?{IS%M1Q!E6j)>Yk1iWea>Cu@GrstvP~0_(0h-j8nkZ0U&@#WCWbJ$W*PB zpbj1~wb#;^_H(k5D-QXil}Et1VWqc(cZumDt?)qazMVcVy(-StxgGCK)!!>|=b=FCAtoUP>%U#QKAUT)S~Y#%biwm^n0lHW zwd}*UmV#yyFEkd~3B80NLY4p->F5;e=ojmFLo@Jnzow}ld^4V4-OQHrmVpdm)O6wo zeUNm3+qc 
zYhI@aE@GLYtgX83#n__ie%3=;BeI4~(K8mZ;Y2eQUCDPzqcc)J-zAMt*Da>*L0EHB zG1gJtp!yccOl{Icc$eK4(zs$cYsz+O}N;HSyL-k|JKx+W< zI=@9P>=*W3W^Y`chK2gX8hs=fyE(RYjva+?j1MXYUfXHnN zeRp~WkP^uIPy=I3!5Y{UhDa0^3x_nEOacDZAS;@)MDYd}w1I?}1XeAR{dSjmPRnXD zcfgbs8Hprd`@wR1suxJh<@1i0r0oYM$=$-tzsTb5!Karld2hMh`a;mcJjc zcZ7n0o}|4(!_mExCWDjft-3Jcr=chE>NcR_Wut~}N-=&zc9PNY0V~j8Bw<4~Z7007 zTU-!WNeLK$kfYH1N$14Af-o;doH)Ggu-@ao8<&>qG4{JtH?^6Ll{UQFuf_1UQ#y(z z)Thg4Hkn?3jq@wZ*DLp^U#l-oJ7mIR5c3mFhWf*u@yTmc-4v;~NjF_vBWTvi!S*zBt2R)ZCOb4|kv{tE(^V``k zm~G(uL&N4QlEf<~=Gw(INDo@~^hWZjaelo*_srgL*!c`g1m6l$*ILoQ1AOai@GYy@ z0Udei`CxUeM+n{$E&QrIZ@~mcJpYk8*WyF(7vZC!_sf=oU-A5AWRuk7@BdwUUV_L6 zH1-42uHyavdi98#a}pcnZZrMx+=xN1==@D7?#=tZxJo6p@<^>-(&%hbm^O?PY7Te1 zRIBbKq74P$LI3ei6o@*?%hh)5*EMRZUdanHGZ!ZJXA=;TXy~+w4)rc8<7|&dLs%*G zM{yageG}H+0DJNmGBxrr(LDDa=!dB*_{ecN*+$;9-<}3bx*Qyx!&%f5s`!W4%{o4iFKb)6|Mm|v2S`h03?dUYpnbXb|&epvfmoLf*x_DGpyMcJiY>$_ zwUbs&)P?AvHW0Rs8Yf;DZo{NVoFsrRol4Z=! z35>?np&*ls9!T;kp+R^r-`Zjn8(Xz85>gYB+=y|FmEf|&V-_I(y4CC8{y$$m@B2n# zBwiOIw0h~Qg^Ysx zmX+``R1gvn!>ss5ZGvV+>QZT7pgOk+qqwgAfQtnkv31Qu5#=!Y2DD&;&()ZEX%31Y zfNSci0vf1N$ciYiTeI*XjkSlyh^8tu_^?)7kmji%70cC>7OGc(ufJ#wL>768AUNH5 zt8R|~%gQfyEZCE){wn|p=YGnX_xwUt5(dqjd`7T z^ym|4q2ti?W)if}Xz;;>id-|EAONj7#sxzY-IW0h%jj6~IvAF5!H{hILO78AWl*UJ zhS7#s0YgJeee#D_gDlcOr}fe!pES*zAizayEiB2sk790~eP0XiAW~lgAm^j%%r_DFR;8IiIgr9+p*A_)zbFll|qORUJ?FSfS2q~-^; zeXwxIsiy_P<$jaCdaE@1V(QttlNsx??9&6Qh|4B`o1WoZ7U?9UiAX0nm#wC*<+-!b z@E7ps#VZDB~n}f?7-ngjYLf8g~ce414&1A z6Id*j^za@lEJq#17G$!W>N~vm&BzxYT3RJDmi!_Vyj7I`;$#9Thp4`Wg)Yqq~=Rbdn*S}%wvBu@FNzP*Jv_=oZ8cmhBrnG;ZniN|gB^G6( zS7ETJ6A40mh=5V=@NoCK;<3oW^-5>$$;m079k#QK_fk@Oqi!&&=#H%(r#@%?>ire- zWfrC0E*6dgLS%mehKm40ig;QK5l?OwzmiGniQedz!4sb>rcQa)s*}=74ynQhg4;4a zKlb;}QU*E)oKYZSHB<}YT$x(8XbHFI)C{UZPZ#iM zX`lMDdTjZ+K?4WRf3{R*W%HZ;bLZ}s=+#btsp0LDM}7!^tETLGHf34LS}~2ejUhhYi{xc3^ajq(hS zHD%I4RS(zqYB2GYQrf$bQ)@^KAs$NXl5aMMn$&=4(tuA2D`Bg!_rvj`_|cqOnW>|b zriL(K?3^RH~^x43SbY3k#dhyBIbmiPT6e@8VQBQ1NtOyldWuiS3!p0ryU|CUT%lC6?xTdU76PGsp*!s$s93 
z27qz!mB}XaIh@62X6_RsmN;)R8>=|-;(WI1&`-Bj=xEN7Vzv`q^{=bHt$98x>Cxx6 z9}_p(q&}5?_aa>BQe*>DJqD@UPDiVs| z&#f(ZW6WwYuTAkV8rFPwVzn!G2W%Z4K69BQUOs;$J$(k-wOoA1|A$ipKOE!VyUebs zTXHn>i`@n%Ee~HJI*5XvZW70Y53yyz6+B-V-oo~rlRnX&mm0xK$b=(m8~HxoAAr8O z-k{u9a#EAq%q^z~KHR@ZHE7I_vjkT+)!cpTIF$+VeyXu(s+!vl*|*5Wfs$)f2%zK| z5=4phk__ikuKSozUUS)XIL*%dG|Bh-Ha|^n z*TLu8fIt6%`mx6be)`q5Q?|UvpWgwjAP-Tr@YIDBt8x0~3GIUvPi;aw`V#3GN1Qly zS>Vq7E2_=kED91a?H4^cc~T$_*^oa(inO@^%{7)i61UJ@Y}&AP44Tk}wC~oZvYUss z*Vl%$TTZ(Nvzx}XKYvdj)6RsE&}V6OTL^rr7DHES6BBYLyyDCZMmh8Yvg8$u$n#q< zKG3FBoS9ogIgrg~*pw1tAtgl9#%S~kbwz%;U8gkE87&PG)n<2pDiAJBxtf-FZ1K8- zyOjy(p4-p)xAeo+6SoH)pE5sp`}~~UEQ#=Wq&k-OQVK!0D}5)f?mv7pK6;!y$EDCOi?n zosT{5eKz5vq?8kr=N{U=fOYmtA?z&)-ac;8()oJ|I1bp@j{#i}e?kquH!FN`}r-)bUx^`#rC@@+S+CZD}#rwSxv4l?O z$Fpc)!%#n6VpJxr#_cW>7$4?^IkV7kjb2(H_DgL=QGFDF7E$FAz@s8OY=J@2(oD7V zcWOQpZn2^2ift#+$@wjD;PdwA-+hNUON*I1)bI9*Sle6hhZVuvsP?m$tICbC!}Z!v zPFOmXx#gi%>+(f{$Q1g9imli*LW2x3AR#28wnE~eX!n&+D+V(S(C;e}!)0IJfxbN|{M=<*NR}^L_C0#S=Y>o;p7<0LPfD012;J|1rihkd10vIhE6c93?|*tiEa9t|HubfoIoze}Bk zD$9{SJ)b@G221WTdzUOV8y{3nlAj)1y4qvu3~|#Cwqyw_F<3lxIO~IoP1N5z4zP=V ze7gI7x1*E(Q7_9|x3pQXlDJUSf*)k+bW|Q(D2C_wX!{H!cZx1mbz9Mt z1-ilbU}N}?oFrunb0n1L07{5P4yS&UF68Aqij0h*ty2zj4BxvI$oy%OR#Rq6a_fwc zilTgV@03gr^>gu?aWj@qRiAh;zhhUA<%nOlt}h-~zkWl9_3bV^mPci^Ju*;A>egqR zI6&O8@cow64`P3b>gUo8ME=VOiSDYvQkwo}I!f;P;71wt-ydZe#5|4Do%tEs&}q(n zhJj!vapc8_sg)2(ZjI>3bGbQ3a*nD$vd|y@nUS8ACCOu2Hf`BMuHAI+YH?Arw2YNn zm&`7l8GE2o*mm_=MgMtScQmYNpV>4gefDbXXoPxIT8|S8#g0P5!U0!-0^)9sO+U1f zIKh&9N1nWCFf)fhA;=9zzN_i2qa=jBX-6?}V2n}W35ew)I*M$L!rcHF!_SI;&}cRF z12jNB{l&@SDXkl~=x%8d&QvG6lI-#EXCBGxmwnK<`i0^1dhMuNbNs}}i32YZ-Zd_G zWNiofiomo+floNl@QzzbI(?b^!20+X8Xjj5>f~uR`c^|tM!!boG@xJI{pscUU&1PE69EI<{!3;^1D3w##CZ^iq{X`SB-fH;HI1y;VJ_ zem7Ib9D`BJhZPqOzSuE&27CDB?jKp=`4psn-#kEZyEtTx`r?$rXn%&bm~kdo<6+oI_s?1@2KkGT;OIRxWGWF`>m3XDb>AiNvZ-Ud7SQN z{QFBW$e z&&(8mM*`sH2~))fSGWDFpPwfi#~M46V2E@U;f!L!GRVeKILp$)az)_z`x>sAa`xTX z7(|F%fzxH8n-Hy9;iwgshc(NJZMCyfHa{-8S=MYTW%FD2;rZNHYXiD@Y%Rc(CS(A8 
z8f+}ZPp|CM_sNqWjVBKdD|K{!hzg>nX)L8P&RlBhI`c)VJ=h~P00v3`d*lq5Fa^iG z{yW!k%O4!U&Syc}UIY((82VuuvDm~qb*+Zvozbff)y9zLj3GaoptDc|a?H$vuOi3_ zm#&uCr^z`|X=L%>#sXpfd?swXG`akYO(&l`Il1KnS&APY=FepHS-=JsvX6P}V<8*f z>fcoT{wMXW`n{-ZT6Jk@cC$;5vFfn}kEGM^#)JyZl@cxvNvC2m`1Kg85xk1*&5Jfs zeq-`RMAb^D34M#ytq49|37#;%b}i97P<;EDC3s9}CZFDOS$#J0;`GDsMD$&-BDZ?= z{_Ul&$B$a}&dMFE+Ox9DpU-X3_l>_Vl$lV+^IPwSymgiCdbju9iA1@a`PlMU^)WUDL^K|otNvwX8B zOqId;B_>Z7f6jBH_EgWSyE)3#?^lFyVEsI+6|R zHE9``@Yt$BOulho>1I#q%cLadv?(*igL_t-XRGGbyY`37SGf>gxO>3Xib94G#Wh%s zPE8C+w+&6YjE4``OQVChVHgcJRWuk*UtDtAMfZ&c5gIg8#WyCJI96%x-B582)scWa zcfRp{ypdrLp4uqOay%JIpM^_G0fynSB9AT|`Z;52!5R$VxVUn~l$9)$v4u}&W&E)7 zlUsS8o;Y^(TQ+mSw%qv>KYI7hM>Cb^iu;Bi|1$Z5u~WCNo8;s5Q#RW)_5)#*e|IzWR_l!x$Tc{(&Ue()lk{7Z10qO+@IC*C?R+Xeu}3TB`V{Gcmc4C#8ST?9xbR@>(`b&q_j33CE~`- znlz%vGqgd8*;(MGi^(1*sAYilinQZ@v+^LTqkZO^oB+OvK;qD6Nptxt!6xTP%8u#6fZXN+{v z6m^a#i#c2lSKHgw*6g62IixT0W2G+!vvKMi`t{Dp)2ohO_tWzdxQeF#y6Fc^ZhZdh zdX}DB2Ga8NfU{A-Cau+|#v$N`fpNZ6FHdz{Jno~f*R^H}v&q%fX6cxwxP$)E*-^C_ z{`#>ENz(8X+CWPFBHF_uv9xHBHY2wpNRf)sq-cSsr#ZYUb^9vh+wN2{1(}7ibY2M* zzbLs=$7C_NQ|Dyyw{`>jw=LhHUt$Mf!M1{b#7Dv!aEua2&_aed+sCm3& T&&O!b z)9X?Ku!IRc#W7MW*VIHu_l+Y5I`2u=YijJvVIVQT2I5nbb)d>8-je;5O zkF3eOt{_^di5%_rLTBMaMeOnp_TvzFsx__^c&G7rer?IGoq&rScp5iBpI3Y9$H(3J zq=-oEuaS9_VT=}B6$`Vp$$eDa@DK?IZ{#_u zQ{k5*(w5WQ@DduFv@LuH4Ag+mYpE!f4L#gdgr%i$yH3O0E6<|I4_CO`{F+}33zFX6 zn9(^sEHblG#-{a|ohL+w**j%yb{5_E&&Ktc8C#^zN!{K_N_nSCGP{w~txHn!JDpQr z)J*BxC23ISPJg4xF5g+S4kc zm255WVxSueG~-#^1Li+iy}@DziZ#(WzzG1?0H~2D zE)NI|hz*c>=aev1bim*UGsNjolUObfS>g^}eH0F@64?OoT)~izL6SHBa~Cd9x&|t- zP=F-&?RluVgHnJYk6kOHtQ&us-bOyL)>K8dS7T z>yE>_@91&u(_gOS%v_SQ`dW`2&{iVBBoLYBJw77Aqbdj|lrmZ^RvA8`7gta)u2KA| z`!0F=1|3``Wl+x?;8KU<2RTe^MYt&n@#dfBKijUj`e-zRt*Rz}}y(-V|%0UC21hFFM@Wo}0V% zp7fP77E{*~eY59^mk(c-orf%rlLuIU`bZtSXZ@xkm4FO^B$w zNfb|QS|w#10`a)E1*6*FwxbxOu_qab+MDYWi1Pt=p<4vvgPb-XIWb)f_kp6gk3J=M z4{#Ol!Afx-=#Bf){C;_UAKfMJZB_D=YT7#Rphlh&uRXvw)yh+9^9Q7Bq3Q;BY{eh! 
zgK+%0*nxzuCVn`Zgm4T0i;0vHB7=gW=)&S+Vyx00R&C(Kp=*?Ccm5fZF+#mHa{RbI zzN@x(#n6e#EN+lpt+qQ>{af6=tBU$u{p_zQtIvf@35gW91WztiL7g+gHzr0p&3;z> zlW!(D?}?@9FX?77J;Q#^|KcY7Q|nHdbo*gi`a{XP;|=PryBcRW5tdO=@RHGrr&db> z80fVbTmtAgZb}W%%}35LxIPGukz@48S$w%PLwy|1%4C{rso2FKvrreOCDQO_xr{Va zDg%zAa!zr+W1EyFjhe2mO=Bz4SfAOWr8KtPA=f#5Ufs>QF3o27m8m@1_C2sS0Az%N1A1xdk2&}k_t$Vh<$MIshQf7B4qHQTG0ykqlZc^ZSv2zq>As~0E5<5^z8m?=y&hX2 zI7vcSXwFy&uNtc?oKZrmR3)lJTTQTOT!G)XR&voGKTRDrdd(0FUSsZ};vLQ7HmDHs z09nLhpfgffc;^+^oE2>q!zxy*2v`^`5C~mEKNwJvF%U#VRUE>An5~OMB~GGvhBwe6 zz~Mb4RBR}P2D(6ZaKQ|{aJch;yz!tlk8oBq2B51I6^`%;qkAkV<1GHM-}c?BabmNl z$HtEas7u*cZ)ddlttUd4`+8kgv&3BITWsoARKO%PP1>FG*YuG|6{}W>NqWk5R;|3d z{KMI@(>YfrEeZMa(g6iWDUv8WD}&S_$FO67}l%5+}pg7khkx{k*{WoNb<$ zTf%uH9DT!-HP<7g4we=J9E10L;yhSXOzFn5GY4lZwhX3SRWA5TqN#jk(J@Lk%rED; z&^kuw{&~gEm%Qu_zgX&I6BK(;2(kr*1wmCZlr2*;4o$m4kr&u5M7}rA>NG~@K|)2) z4uYO1vkz!c3|>6bFfOI6g^X;RI?Zg1L{2i6v zzJ`ApPw?;JBPLl!eyU#D>Mic*HLlmDUSoT)@#@h*V+U;-GLHPX4BZp3m%=RRICu=I3sC6If~cZ!r#IGMQ-?YKW2h(ytw==&V)1=fuj7YnA>B>J#vIFz;B`Hw->gnZ+Fa-t$ zUI4!aU1L5RRcwU{S?Iz=Q7?Ch-DI!pJ^>|A%Z_-d@K2_N5*_1;1M-2}sJKt4PpnS^ zACR!oK=;F|ym1l2G3L@bGSgkus1*$&$1Wf|$~7+_T)d$!9RC^qvEP1VQvOfsOg2JY z$j0qGadeBC!8)OiwA0|FgT%ide)`$>Y^ig{>SYcYybGDymTBDjjRMUj053~v8SkZR z4k6EhD|i>KwZOa34g_r}=rG(y4Lyj2Y8FY>u9wXrfqTR%MZLIb3(vf*&@d&7Hn8v^ z!tp%h3GENML1`4(BLBVcWe2RpO5phPA> zM)%Js)}mZGqi)@(Msbc~B4+Sn&@XY36RklCNEGdK0^=m)^>d5e zS3ItKG%jn(XmQq<C%n zVQa$PO;H~L19G5dZn2zzb!$V`Y>co)iPAdgK=#5ni!0CuKf)UA8T z!x~-`9y|~O6bx!XJMK8^BIho#i*s%cE2r#TveYqbR7&EQQJ;-euTFNhwNDlgjAJ?d z#U3k<9$BBV{OFN2@~Fv^#tj}Zb^7QRD_!G-QU3y7FjQJD*9I>rQdk9O+OCNrcnTl@ zp|AjM93zKO@FH=pNPMO*AgXvWE#l8P0)a*Mno(jtICN1isHt|2L9M0j`1v7uoR^}Z zTY~iaikd50_1v(bM@yBhZ?&Gyv?Rbqw_WWnmQ*{kz0UjWs#=*{ZHBckg^somktym0 zmZXSUFEEkBg(g!?-bOboE2TBfC>5JyD?A4%@i~5l50wCSXN|zZXOVG1PA3}pOYT4T z|H5f-PXfaLpK#x3+3(QN_x_l<@SkbZ?@p7PLfWd+yVZru$BU&$_ZhX8vBlXaqL}-1 zVSV+3;p3%>`3lKF8I^3!Lmj`l&0VZaRzRe2){tATE%iwQCLYfZ980k0{Yde z!=9Q{@TW&hORf+jh6=UCCG;7#)%BSJg`d$A0QhI@_)M&27d{g%6hRp!eP+7rGiwSz 
zqbGpz&kV)d?pSu?Gm%0SaXJ4?nCmm`3O`f0Hk`{4#5rqNp5jz0qAOQ`mi+_`mikt? zIEWtIQiP%uVIy-4bYf&8Q1=a)*-(Iuo&A73P~1d>;*krIfWr{{z&l7CrdpR;K{^u! z8$r5u!%?T2`QF;!VMd#vD(UO;nLo??9wQu|jk~sZ(9V; z(^~DQj%)Uz`sEf@_UDod?`0<6JQ$q&kX8J%YU(^}UD#VtR9+04vBKJ?&kw zvL{V8s+9I5a%K3+>0$@59c5E_7D(;mY zASNUs0}%d2V|+*BK+=Q!kEABz-eg)|Lud;WPw<9a&bP|*)pT~~R_d6AWTWCOaox^4 z)~KUv?dEu=;rKZAqkjpuGE?>G_jKjsC+*wI$50P4iRN~t!DIcH=lxekR4P-k)^9+# z0pU@OEBNt7x+cL41`f?0nqP@n#ma-tN;j_6+wg>oMv;@<0;V!>0_gM?{8qnDPj~3@`4Jv<3w67vimt)4!U&=2^#1f3NyX z{WJag^fiBX>hyfgv}<{BXF}wTPqvX>31y?`+{OW?g7R9E2Hs<~p75&cKbe{D|zcljJiJ*!hAKiX&9`8$MHG|R+j24KN35d?PR>#_mQK#1 zRmP#TAF4e3E-=8L!~=tki48z;!#WVzA$1?`gntF8Ww0|tV@Xj9r=k^8H#-NXQ2yM)Z+&xDC>8i8V#3M-}S z@*#N2aTtE)L@}T&j1@$PafLL5(Pk{%0GNuC;3P7XViF(^{h$!|1;IECha2d+^YMxn zldmw_KD80cU)y5(ef3)hyUI4He^dl1og4V1iwqg|`-0@~Ta(qybZE~0__13H zu+w!E4laH1+*PSRFxih>An4?*tKXa(>yp#~AGxU*k`tQIhy+Aju~Fz-@EN&@+?V^z z!vUOoxw#+inz6ut{KUVVPO+)oE`61ly65~nsm6Ego+FNdr9 zU=e`Gb-vD_99D)i=8&o&Y*hRWk+auBh#N*aP+pI(dj1{6q^#~ev@0yIDsH(2Iu-V2;DU{OBO zeSrJ0IZ}Z^<&nMUZ^-aAk*=xon;*Ommq=_lQb!gODBM&L^IwH~$gN<=2PCl(0(S#R z4r5>kt|A1$xOx)ca?ria^ zMAoWAsfi7u1m6AH!SAB4#u>dLAO&w)@WZ8=-OetVz0yO9*LMwFUdj~c3eS}2F_#x=? 
zGfo(f%$zC03}KG2fW1&`*$W9kHZCo31rp&}jbH2XYkhug&abWcwLQOf<=3A4+Mizs z@M{XczRRy8_%)4RGx#-&U#IcwEPkB_Eux-=GtF`S&B?zov6WOw$rz7I+7w)}>fqvC zuQ{z0fdh%0aSph8i&4)gO^s4gd7_RZHCEY|%0?dSo0^)Ml`8da=yfUeR_f!_f>gP; zQh---i>HRBilZ8O(d*f%>r>@XIqeIZHYx$_Q~A%pPg(dZ^wFFs!$VLT0?-w(2%D>u za81Lj^=vtHeaP(*{#D2y-5vF8j-h!DfCcaYFzebJ!}1)1@*D&6DCwn+&Cw{&(cG5P z!tjy4c@6*;5C*`T+Z_G!9KG@!-SQl5@<1Anx2gS5P6xv;qQ}5E@4{b9vpE3a@ixcw zJV%Dj0XR>zIcDcMvTTldd5$SI$9&=wS_TbA-M}C}UoZu#Nx&r%9q_rF)tru)WUcw4 z;{3T=(icsRC`|#31r=V1xT8fZsD%ZOM^>ook6Wa!`+oE)+F&u=*^66L%V`mM z+)A}34@{mUP8^gpNo?|A;;7CwYUZigts6CL-MVqZR??APgFAM6xA&k5wVs_GdD>F` zZ0bMnviQ@MpINQc{9Y4N5-+eoXHOR3?1g^a-i7x+>^-hd-QM}XkL3TX5kL$4V^^QVui~`e8j(>(mL3nb~^M^0`@YH5=CKTenfKw|ee$z9oCF zqu#2h(GyooVc+L#u!C(_o#Cw-wC&z%*n-UDr11$~ zV2u+Ys`I2kel!5iUQ_#4Def(jqLM2rEvw;(*g z5HRuLoF&tyE)y5en7Mdx_im0J-S)BO@9%hjp-AEvFNvjW^b0~J_y{vv0#me3OUA>*OF=FNH3F}sf#Ts?|X1C+f zKXfU`{cHKF3 z+V)LzXKfB&F=pK2#rCmFB}@yAieAxwnZvPc;PR-*IXxHc+3ReRJY~|L!I=}pxxF*T z4d^!xHaq3`O55c5gx?CYQYpD+iE}j9VWFcr+%U%Yxgxmm8}>Mb8>DT{s}scX&a%<~ zxy6gaa&!t|QwE^FOBpWww+F*3qxbu($w`Wc3a!+e(Vg z|63f>36SmrNG}2HmKWA2AqFdn+W|VH-PO&kIHx#7lO9q7jBJLl2Bj7@!%(O(hEkd? zhw0J^7g#(-4}s$2`VteJDglfd#VVm8Jf{>j(Gi7*L;Hg4EEFwShvP5P8D0%?wB%me80J3z{SWqjwxz$iUfkM)?Hjhb`QWrU#i6j+}vA-zZ)wt{eXFz{m; z;6WKhhu~za(aO zezLwKJ(!qdNK!d177`|2-2^GX1!lmIdY_Pmm?Z6El2lZ1ilkfn9ApTl5t(ou$<7Y`6G5@xF@q zeqVgQkI?BA-_LaxeRKNv7j7F03^0R!O!3feB!M#sZH6I0&%L`OP_T#)_Rd1MR`+CM zxXqdcY?shHJuF?zyvNN5AkECIZO28 zaUy)bEOoM4eXLr2%9q``N{3&J=HEW6;5Q_!%tzjQJw-Nj7c)s%6FBq(GCIhxSJzw` zBsOvFBf(b|e*5gguhO`E%a`qC^8EZ?4}N(mS1OhN>zp+!=STtAStVfkpZIMnG%SAH z+=Z({TE}t+oUsN(N7xO&#>1J=9{yC{NmIK@=jG@ZUvu4o39UihS3up-==NaaoyxQ? 
zH%TP-OkAlnX=;!)eC93GE@MChG;Z*6DdGIRltW{1=U_24qje{YyZ^tzRin3Ok$Oy1 zSMKce^GC;?*muvLyKNEcw5QweC#QXtVgG2;f?X`U`>>w9GCI%P`N^u0vqtpjH29r9 zQ@hRE)@A+hw3#EpwZOXMKtQV)bkZS;g?qlCT)A~3elGt49-*nOR46u?cbT!p<~Fdk zvqAhC;HvoWVcZVHA`P0i4r;Q1FsTb!d+yYzk?OiMw)WiJd81j)NzQaucgmWL3)SnC zAR*8HaLedjVQPrCSZBtNu`^Jeh8SHzoLmp*QUW$;OU0Ad(Nio!pC@IK&K%>&a)F#R zEC=s@No~Wbc$;?~dq)nc5M(vE59E3iMts+yplPd{>hG-B&97NekF;}pcAoom_r5b? zk@x>-w7=Cw4DooD`X{TP{_wcOXZP=4PW|lRgD zwDjQg(NAGqAJu-?#K|~Q(Sf6LGXBGEjHAWsDf|lKM4@Pd>e1dn6M0@pU5@ni17r0g z$(O)Ru~X~QeAG93sC9hOu%wdG{THLzO))fImi}};P$RJ}zCvj%?paXn!hiFXBx@u} zq%Kgj3ELJ2I-}B(lJxHZCCx|HluMev9=2{KYTA{+r%R(VI&Y!np*`~EH^59Q0os=Y z2R8WOuC%mm?33lr0vHL( zUQLCbJ#LMN$I?IBe&mvU;pCCC0mF~io!Y-CJ#FqN{0g$^SOxrwB$Uy<-gJ&S0&%BF z9IIfr`Gbf?`Ao1wvQ`A0h6!nkkD+Ubi9U4{REqm>1*b@H*cLC*Dmb!%R+TXR zPDdCzPlRQKq4PxXFqkb3!rI}!nK?#zSYho^0tpzSK8@X_lO|9#tz0qY?U_gKst=gw z_op9~ObgsQZ}*wYN(#y|4zUw+YeqL>(SJ{T!oogye?sP+^Ha|JQoRZ*ljwpi_W^hb zjsF8KD_)w!@iH?*?8uO8rOOx~dN}%998Z&qIERXj!~o|bd47u81+Z*g;3r+NjE2TA zTJft)juP!hO)}k`4oSEk0cSpM=Ol%40dMn)z^CB3F{OK$m3SqZ_iv(14@0sJmZuAN zs)B~w#a;QYg=4h7vuF}PSnQJWSKm+CbD>boU*4N^>_?NBpS0Ecs~fgca|%WMfEeG+AG2FC%Nh`SunRSg);-3O5rlL|r+;Bz%e^6DvmkiWvaeoGJO z*iqT@`0hRX?GMt%d&q2^7})d=bh!1)o87c)#)Y5Ge8fWD-2`u}EGA`qO`S@(cE67K(A0dYM&KFWa>nbSK#}g_|*>y0VV%6coJEsjIj^ z$NY)dfdvy%7)n8PTzn-nCw0o8Wa+3bO>{0!6Yp`Ybv|UeuH#Fv!0IU$-mif6Ai8Aw zE7=`-ACh%$PIU{%j;ydX!sSXYeqoo!hLphkgD_1h&izhENAD|9M-8d3gkCq<(t7tz@7z!+RsvtTBr$Wto0oJs zn5t$M9F8OCKj|UTFy{j?_{wXPk$~jX*Qq1GI~~HF$RV=o{UCsMuY{~w3RpRq5$GrT zjSRU}Qvg_YX|noWsgm-17;7JN?M9Gofy-OK49X7j5e6xiBHU?*a|C2@)qP{HlsrZO zgLFb3q(FC}Q0xE4-FwGZRcwF5d-iE5q<3l{kRXUr1Ja9tpaLSG(g`SpB1jDhMT%0S zL_h?TlF$N(0^tZqk)k39qM+DCt^$e*RuB<$_IbW*&7OVs3Gv?hd*0`H{&?^G2<&~< zS!ZU=%<5}RCfWxDontujZOJJRMxdJm|537*qhsmc4p%4NGJR+#`d5y4J4Y;mPgQDU zbuc=Q)(VoJm|{6Xv0OPwjKr25V~Ib=k9=Qto@1f8kD(2K0{ihB6|_!XcLX_It0QpuBuEDnEh2Y4^@!+OR0ZeEt-3yqLe*R{wp!FRYX)`wZzQC zmWiDcvlHEY?I(^>3c*s5Ob73?%~+(Ol*-i62lfrdpD3?U0N+X1qH98q&^6zvJ3te? 
zBoqqjKmH`DPWx?n-WB5qQTgXQ@%%@J4t!Gj=tl<+eC({{EsHQ__{aHnm#U$`DkjZAr2_*AAru5(!on2D`Y zvjm(U!$nf2txF@4ohW~aiWY03)Xa3X!k>@S`36~tnIQTMsYVxO>y+cDzq53feq`G# zN9U|C`u#S1>V$6=eI=sCkNv~Q9@p@v!&wKu|L#cjx2`oEH>jQOP_1QGW*MLTUg=e% zv?KR`t$`}+aL_*qUBf-LytE(=VVoDrMPpF|2B z3-0Jn5upaDxRfFlYl9TSnT{uo@r4^tee=QFqS{L1@Wy;NNd_Ck+rEBy+rB}hck>~=byU=9?q9G*TpY zNTU`|?!2G_qGrrH&+_~sh0az0NwHPi5kYw&lL3jhHb;6n-eMG*LEY7L4?>rB-h~Ja zxdUoZ?J)LfhWnRKUN>495wS6g)@JH8pXeJAk8ZFn-}>BFOSFFG#f39P|J4U{XSz|< zIA|DR_ytj2uWa1@`XffvtnnjeI;8i)ZD6Lc@N)0;3hCvBE)-XuP7yS1#R_=Z>h!^; z=KW}P4)>l!6hs9}qHt4>**RRk!n%u0YAoIopLm_JM>A>_5Y@8Lij*UJ@-;S8nT<^r zmUNPUJY;YQMIczjEAogSL?_>%ygWy&B#b^ruLMG@0eMV&?6{%{dmLTyph*Jt_qBCV z4?3et`{e0cO%cjJqpGOi&WnOhWZ;g4G_gUQ$A6u41*oR`70_Zfr>tH&OF0Fbr2XO@z^Cqj+rs;Ye;T!@-XdgD0ebI5?ZHrFCvaPX@ z+rN5@W3q3FT3-E}@hR1*rhf}gIG#KKCa{f0yX2ZFn)(VqyF@m zIQ{*Z`K9CX#pimlQ8_v(=kBedzxeF$&y5a9=-Dl{Iu03AjcMSw;6g6)EGB40USZpZ zLG~=WZIuuVI-tB=p^XQ&gnCfh)f*kkrxhKMvZF~)kIs$;|E(6C3AgW#1<^%#3Yt9C z&Z#IL#bD#is2=h;@qehj(0$F$9B>Oc-hMLwNn`kO@s{z#5;1I2{$%m^Vq=7Odxf6j zyU71_)?4_l=n?$iQ6Kl-d;q?pV~{c#+H7wcS3^pk8>A0g8Rg)s_Mo0SjNGr@bpERZ zZT+~TU$ykibP_Mz*l)*z^djo-mlg=qd85

1!1exG)EK6u~BkmExnKuQI2?mS0J2 za8eyD)-9Q_IsdBnz4POS{BrHRiO0v~>)!e6H?A*Su|PC+^caHZ>q76Qr32=rwfK1D z_AbkY4Vymc@$o}N4py6dp`EbWp;||;X1h}}$TJxf$O&3tY$jYIs7+R=w*Qtqa7SN5<*9N}EpY8#chxqKjiIjb=Hx z+Chva9KBCN|64i0iwkxTbh_E~11*?uol@L?N$7~uF2wzFh(DR^UuO4gXBxEkM%ri) zv{8cCkun;+5waD2M5f(dWpk0UPVmE=cY^K*j_vt{8zBbs^hFzcmNd&3Hy{JW1IEr3 zbL&*n5g6oCi;Yc?L(nL2U7~N1O)%ia!vlbYMwZdFAmW0fdrIN zkg4yE#qURHJ-y+kXZt@&VOHtYM)~KZKnMxaGo<(JSZJ?<(l&l z(MpLRO@LDCtC#n7x-Zwx3Cmq|-x zbLOtb{aRJ!x#TQu{|f zRiDHaui3cY5W@fFrlPg6d9lA7`$9OKaRCOHa?8WwC_%@2xoUGPF9f4n=M+ zZH@g43-shlE5@&W2@%Co!|Awt%m{H9Rj&e^NpoOI7Dl2HC}mAF}Mpw~(cey_SFNnts9e{53t%_oJSeqsI(39-&dxGPX0H zOwo3DLpUGVDkKluH9=>H8(AqnsT3UEPl}-YU}Y{OMRTH85;j*7ZLXy3BvNOSGLz^) zB^BYxzM;IDhui)k_tcgVN-GN0HY$nN1f6}j4+PX&)^O#YHa*vwDe@nGqJuHK99>s` z$cQmIiq7S!yP(NxNbb5=nP_ObV79E=z!IXx2|89mKK63S;EfFDnMY#gku>wj>`0n< zWM(AIJhBK+WoAxp*OK6wV>xZpWCcxkiM_)|wlvaBB~9;Y#HkrcJ&oo89zGlMh}Uwx zAuMgA3=R^zTwbZxGHCf&OMouob-CFX#6?HhE=;1DE7JuwP~^hRT=3dqtTz1(>7P8~ zDDs=4%ma5?b0j(q&Qs<9BI)6r=Hj*IS*@eKAL%* zpBtnhXXIEaGWgO^^bu<7EQWvdPV`Eum{p&awqkV29VTDmBvwUwrEU;O;l9?LoYxAK z1x?bzPhsN)3Hvg>mzq@AxV?73a>BQ^X9garUrvTQjjjSih9kGQh^G5pYw|PQRA^p{ zOMMDt`y98&)%N7-X~k&7`2Jt-=__~sezs>jJW*-`ff=-u}f+(~a#&*|R7-tm5DSL5SUY;NZ{$Hq+ zLE&_YNMN}Uln7QbrI(mA-rHF7H)fD?*i!%D7cOl@{-1yakOcpe{npkX*2SJ|we^%p z#3gL3TSf;p;2J0~iAxaPuz4!rTthK7ok6(7p*kSfAgP7Up$n@Uqr4xqe!64Sug1Ae+lGvv z;GA-9&Seo&y7bdS2cEfgq2XJ9!ma;)Bvw@Vvu?lm(?x@cdZV93%H;FUUNZi44C9$B68if0oW7_d&Q-=Y*Pbl=W3C9lI_Azd zFo_X=@0ODg+5`Td;cY5FJ?coWs}iZ@<=BtP$3e``rWPqz%YSjZpncI`iB%-2g`*}f z7bFEe;7h8G>-ikMT(r_KCZ4!>a3{0rL#z=kF3+wlHgY#(S%n3gY zfqG{}>m#9=RzbvW&?jLvkgP9Q*)Nd8T2Jm#-J}z|B%VZ8t356mq znz@mDN&H9UzGP^{X4f>ia@++$?K+D|yDL;BsfcEtQPj*+AmIa@04e^^68si*w;Cc9 z+a1AqF&^6^wxdn`C_y9dc*OJqO-!y}m~eB+ux@-6w~3-`+bXCu;Gq z?xB;rw^8XcpiBeo>B-2NaN<>_!IPqBeKS2# z6}Ovpw`9i3ma#;(gz65r(Is;2@&O|v#tc33W9;UQ8()gPbfoXJu$&PqSA$&}4+yW= zxc1f5oT?W-?mn~o=RZ~(oVsgc_nA=h$lKr<=PbgBi9uxV7VTxT<2v2Hj4gpbA%yU! 
z%92e-964R>rP_F?tsa7Gy)+eWCe^(<9=Q1)_6K$e>c4epO_@9BvJw!}-gXZm+k`;t zHf*Tvl>?978qz79QN1=+ka9e^?H;+HrTe)|!$l|;Wcdm--n-xTjb6>S&b#P2@2d+I z?HDm8XF%@QL640!E~}(i-=pHUrF&jm)$GNcy9(VSr%lOiK4SXxQKhfivt@PEMrqC# z;EogreOl#5DAS+MmZFE!iT~>^oDr%EXXo(jaAZr^l2vFML6qS@mF>idJXfgqW0j+J%IvQZy0|>(aT!4xAC<& zIju?x8(h>}Kky;=BiMkMaexZq;P8T(E7+hu3ev+L>5~0OmyZ31uF9MCT*r263!j?4 zZrIR=y7ztmKCxH7w^@t(?{Ct7a(+&ue#vzf=S-eDxJ8$at)GRAMy`#%3w0D!foJ9c z^T}5eQ|3UR=x$dOt|ZD7XmzUxMp^>fUyy-NUfCViM3vp?4W~{WYK(S@f|15}QN5ao z>#=WNYdzU#h-oOM!3`3i_6rzKWz;vO*lt$i{i6n@40@8c_Nr%(&Z$!6);HS5igT0_ zc=T3IvDG(vOd?zLjZA%yXWAX&ePd45*8L_ww`lg*<(?s){`YL_->ci5l|_MgruT*P zIk^)TOd5Dkn^!yaON}G#r$1)0%uxkVfNIdq<7AEi*qQQBvNES;WN8r4d4z3jE{)w` zuFUOmfwv?@D+V61OA=CTb(xfq(lEH@w3_i&a)+nK&09Ko=IXDG&T2d~Zg95_{rfqp z^fZ3#y&!Mg^7V6DefX`zSkg4}_I^(c?%XAt)}jTbu^4ONM)z5^W!bl&hOACB8Q#vo zXuO(}3~vky3+8m zk~FX6qP0iCGxbYJL=vv83|fEk-sbHdE_`}s;Y014-@9nyq~{(QvNmV%`aubO?(OhM zulw%rp|@+^C%NKdZ3pM)4{kd!C8c-sf#dUx+go;ipjF%ZI*QFrI(BHuilN0JRWX68Q1fToe5niR2eN0(D&M9Wt7_JJWyEQ3)%$qV7M2Q>9- z7#O&>aziRU_NIm~7<9Bd0w0oL*39;p5xu=l;1RP*V~qs6F9QcFOA0Y_FE+%6q`m{Z zEt9GvUrT0YLFiRabI64<<)7pCm-`IaH@ain`=)O{czovN=Z;){{IS+K4-fcmTgSGd zoA0TIhizCga_11^hxayaoRGI+?UR$%IX>+-KeqR(-tmp@8?bE7LxufTJ=x>A+irVg zM3;I?dnLYjIKH%M^0fPV4ZnM2lYRQE0R{7iL=RrL;0c`3k?8+gA2cTJJ;GZ>p3y3S zTxxdsDcZ6spdOUFk~yYjLup~u!rdFee+AjG$WXA^=yXg$8$Ba~9Ak+|xI`l^OsV<{brh9+`t{ti{&{pLrvz-uMAS#`|trzih&$)YMIROE!tS)@{Pg)h+2_>zCGx z#=djD3&MZm{FiTae(&sc%xo`ow&SjC;JvAcQdE-_bk$^*3+bmkWYroqC~|K3Kaypd zEpB+WZEn;xQR_}hQVYKQ5L&0HiY21MZBl}H>v5o9t-&Pm%MX9gopjxG&y#IldC?dp z2JJqyZJ$w~FZ7LaZ98#f{LZ>fjNhJHe0Or2*T&9YHe~67`Kxdu_hTkkUDWrvpFEL# z2-VP;i>e1pSXs_<^5eL5KC3YLvSIXjGx3Mr)9(%Aj?lD&NpptoOfFmI$J#bp~sQ7CAoRb~_V zY_N9fl7EGAp*+T5BZp`-X;KA!^BP}${m&x(ZC_JRqo=;nPK_4GGhGG`v?Bm!lU!UjpIM8@Xx~rFqN0(onqtEC+w>!LDcE5kP`YcJ(iDc}E6Zl)}qGn@a*TlwFny#AYf*Ya{* zFPxacsjd#9o#PB>NA<$hv?<;c$q7@46XI@gt7A~PH1sATd;*rBJV(+=WIB-SZX$0; z6!Q)cIXWOZXbFNMxJ+|WU1Zl9q0%@dE2&j|xft{QCxiM8oB8=0#!1oOnWYP+7<)z7 
zoac>m#-GlDHA{NTtdg)IXXVR!hS4JL@xgg||G|S#xM+_gVUIMyJUy_4Ogo5oYFYc0 zPc(;qh=EvzY4B+vEZ;J;b7=6rFWxZDdcu*Z?vRBa)g6wuzIP{27GI1NpG@-QiwnPr zh6Y@W2+Xe12hi?V=G-OPxo<%gPnE!}qu|WEHHneH2@!Pv=#7qd>3I#^x7_h)_c>2B zg(pMT<~skSpLXZr+(pyvsC4zBt=AfBT;$o88Hz|x#MNf(j6jt_B!vZAMpBH>jpn(s z#~K-%*yFXblG60kYo4C5X7C{IW5&D@^B!=OWRDq>-S*y&t&YBrUoL&kxzjl5o&=vj zMQs@eagi?*0->5iRNxS);+l3n`hc5?ZFv zW#1!H=&tXg6uJrgWJ*}yR#CxkJ)_Arkd)afJSvG|yDgJCC%O94W~~VIup(btOFfTn zqt@%_Q$<>TA{}4{6h-4?-!`e4U27y{G;H0c?d@Y~#Ah^`KmOGot^w_O$J`a3RVS)# zpZmia)e)gzg35iIrya-K17IPAY5jSwE(9Xsxu)0|W^5{{a>hD-UC&JRI^kWyp>X5T z4KD#`fr2{RKFN!ML3=1QuBE552ZM$!6u4nKVm;gTjcSVb#Ji!&-)Q4s+4gpz+kAVP)y11^f2R7ZKx({eyPSb{^ld z<@XL3L7@@O3yz)c?HEry`Zr=9V)Op@<8ia}@yLrA5Ppk5oPl8@L_1HU8g>%;DqC<=Iu5r&SabDQF_xEw5TvgIL-QT56epG!q zQc#ZWuB?o{l0+Z}_h>9H#Pq966>5oO@hGlmwZCG<+$=M%0)I2rw{u+tZGh20Ud`Eum z8kx=uue|cZ2hDoKHHv7M5pj3F9+{NON!)x2ZUpn7>QLQEOd3&8ESvcr#k^pi*b*c$ z6!2eC<{a*PDm)zSI&3_Oh8d`m{vYxiCWu^2@SBKg5t$J!BRWT9N8H43GO*bL{N{-f zn>z=w8~WXI`XTpKXgHNnIY)dh#70A2D3jH|5ic1YJE6T172nO=a+&k}?cEKfbW{BW+)%m&_hqEo z;n?RvaC}24;D!U=3k+AC;i4rz7QcYwj_CaXv&^*X-0i|57-Sv7T7l(99l?sO)} znjhRW6(37XzM=DaCF9%kTZ~JqUKH_L&WqGalPZ7go%hAWNoVJ6KUq~AIrrC2(eb@z z?~9JRO22t+|BJ@A+naASzAf~=p+@fKJN2@?ktn`X@jx~K)yP#2jyKlDUS0Cz+teC3 z%xr3nw^k%)s^H?ipvGRcP#Om*Rg=3xl{*zKzW0Z=nmF&JJ;tZS;sp`E^%6>SCRP1n z``j-l&iH-VQyvhrjRK^FQ)1GrD)~#J-ocGq)X`Efxp@GOBg7ZtZ z)HSvglAt3;$-XdOtmgMW(UG08$T8HN9#+<3Q9h@qXY3a*91!)lip|EmZyM)C z{2G{gbFZ&-m6R9<-)a1=@do;KwEw&M^CIHqmRm&h5*?d~_KYhFdnN{Pt9!jpj$6T@ z0u4d#9d-(<5n4P9t?leVW8|v@8>f7sT7br}X#Ap+l}(p8@rH>-3*_a-rI$7umkPy7 zk+^Y_NLVQ*PW)=ltS@na@mJ5x{(Q2oA9&B$xo@wy|D7f$M5ldwj9u?Kw~C5|D~*er zn{73|Tm6Da+QM@PZE=^mGd zuJy6a1A2{Z9;tWHG+usQBs^@Kc=*9h#^pj$DB@`@t0qqVa?Z@pr|1*Uis&yU3H`vA z#w$I(>2xD`kLd7D{r5!Iefx}U?}-vowQz;;-OEk48W-0r7s;ps=gcmhGP{C3#7bvY z+XQiN-r>Ow);Ob^JFCsk!)<8qRC8QkJK{KDeHwc))wvoqsKK4)%1sxyr+iwS=H!K& zpj%Ls=mN2CJuFz&5z_3gF4PaoX7*ZD)cUf1uYbJW7vnM8#o9}LGEjh zE}fq`R&=J-9%QU`lse}lFX1t7Mf2XpC`mzORNB=LuS>8i(!+m2J@v-Qe?gnFXBr^L 
zV>>k7stjZ`v4l9MUQ{?U7A~orHAO9ccrw7@!Sc)1q$b zT>883#mV}>(oY?moGotTyGG-_BHRk*Jf*+s-l9byE2s}^HDR{a*wT7~E(`~E%qiTy zNLEr}rne!e(!x%us<6tm0)szL9^pnn2h$GK+P7)J-aS`E z6A{^^$Mg&H7GIuioH4E#zkX;Oex+sq#=3Ln$l>39_v5aO7jRPi1*j9MtIdXIB0W$~ zCeJ%dZMx&h#A5nv9`##s>z&Th^H|83Xvi41-r-kz9*)ncj?Arhu0hX@p`kVc z<=y(A*XemOYPxjSKAi80H|Y5>tc-&{?{?tT46&xipy|&WAM_E4Jr_Rplo-$ z@5wLS{X~<&D>nK<_1``}-D`lcGi1YpSKe$u`~OL-!emf620Y$sR6syq))-|q;$7`kn z4U<9z7TB{-zeS%Yx}cW|EuuHP+@{YrQ;AQVhXNnO7-D2IT8}Kgb0&>AGUeNG6>{>% ze0}7n{9P-?uD+$-x+h+EO^+=y=DhNPTl1~jZhU0;-6yV$9$_R88uAB8$^O`{pChVP z1)W`@ZQZMRi>kpZy*G%RIk=6AH%@j_39j_PJ_N7IiiAJ!rd|)=U3%k5FNKbEjoCl^ zQ^OedX@U1*$k>pLQ#b70-(}vI$JX@`x0F_2dHtD>woG05+xyi|TmrqbG0rbQ?|1K z$!w#G;|tKZA~cby-ZW`rqy^+?(7;*0Q?pC>Uyc(J<`b<@;0m~bX!dp{(>)e=#LEoS3`g)t1^H#4~Kkm$wcShvzdGSo& z?q9Fiqi5U@^>(fO`_Budx82ga)BfF$t|_=t^zPN8;@T%yJ|I5P3+JOY<{Se~c#o|L zP1%Ng1k^ND_E1^F&}NfEPvoX9bU&&rkW0_NfSKDt?PhM#Yx2M5%{@CspZxXQd0$P@ z^ZLCstmstsOGArJ>GhW1D426?*{v)7p8L$TW#JRwA6t4XX~M~|{-bo3jsEDs`T+J8 z!s(`ahgy-#}#>)O5^}vgz)`ypo*N43$-mrgilGwCh=6z6kDSVeihuY+P z>QE>Cdg$!Y8A&A2kJLOzMFrr{<$p#p`xbI+iz_{|Zv+!J>jD zBiASi+Y9vo_G3fqlRfD#|e*t7i8{`HE6uG+0vFD{+GdFhRGJ$<>z zyyiOg`#wE(z|c~27dbx0$|Qn64f2LcE(;I5%8m|N45ep!lesVlF)q8hiNsfX6)dg< zQ8geopB3L8t@;NcMd0=>^#xUy?mdpx7>?8bk#+iO%F|!-|Jmse%4m+G6b4!zzG1?4 z`U4D(R%-b9gI|x0$ob)9my+f+XFa#Fc1vu7xeO|c8dPI-Q1f&`h!|g-nCX^|9HM0+*_IdqGh$Q32$}zVO{wld`-5r7+zgU z#k20!fOo-z3t_V2JxSp68Mt)W!}%`Ab4gM?{KfB^Pj$)Z{dk+Ar>3r2*H!fFF>30Y z1rt_Z-1n66Lhp913hFfJ+Vk<=58U53wrEzj*&SLxKB?8{`SXX&%4=q)< zaQe` zSEgRy_d8pUlVc6l+n>9q%sLiUSnxh0oGXzUo$H8f+$6lY?J!f)(3vHvfPwtjex}lFA(tja}KYr}!`@7fETaP#$i~pdW zja{DJv+$3n(L_3Xvajo*Tt|42fc^?8QvdAuU>9+;6B-L z@R;h{3&EyKsA=3>N~UD5O7|i5W@}1@b|Dn0_C|)8p%wFL|4~+Vl}E`HNJ$c6a^lgZ`uMS(Zo^Y$W;)E zkGkT_xa^8U=T0PS;rd^1CjQvb*_rJ`^|HM}8~F^u@)sP%t~yZIfUcQ@;y|V8cRIQ7 z)S1TZ)2KF8LyL;)pX}Tzmc1$}7Zn=c?5%E`5K)&zqVeI z)ilSCu!$;Z%g0 z_t>f>kRJBBlnUxF^R|M=9(A&gMZZq^>n|hH+24rx)zP~2fTMMeC>lD{7=Tek`Y-Ea zT%B|yp5D1x#oq8_a5Qb=D7dxE>+-7J;i_Oo}!fb-WT<;ZWGz&sv#GT1lp 
z7jwZOE=2?%!k{Vd7B`Q>Ndi-WYLoGwc;xG^MP=i{-5sm8abgZ8dXXDbN z#-BMxVXq!xkH*ZMB}U|k&__iAX=PPG<8M4o;ln~SSa`IyB2hF|5pb5}vE!>6LYvn*kn-q}HZVAm405nDzfpPWW z5!RC<^lu1Y;X^|sbeUKX%+o8{pmVhc0?<|_s#`+)4M5-Gp%F^lh5mj4XbX3U2~EjP ztvxhCK+^~b+N9xC53Mwn6M3{^ks4TOD-S`@@JaX$E_>PoI!n$+v={9a#aSn#`BXFK zQx-Z`{8|olY7lgD3wI$_^Jd&Nf}qnZ=wi($9#kW-(juw{L06E_ygtB<*{{WX*71pJ zY|f`e;8a)~F3!YK7CKj43_x3<==dOLI&l(rp>{0*ZQ)J~f~GSBXdWS;F~Vf*@vWW) zc;B2ym@C$Z25O!vi=`yo#4-?mxT9{ua*v!3{H&USwl0^i`w!5$Vp=)Sx7wjy*DTzH z+JbVRZwrDxV?h_gFCTi-Na_Vazb&D8_P~wV%lX`8VOb|4(E;A%Al7`c&XhEQB(dN- zD?ECJcEkTR-uE-wI!-tr!29+XGjWja%t7D>b9~J7y3eE+&Y&8l<-Sn6s4R4@Kvd)= zXthHbn&~BR7vcuyo1tr(6p+wF0YEba01f7M4&%DRbrtUma?=i;1{D#Bahz~{ ziT7Q3EXCR!%c8&bV#s2Hdm~faa9|Zp{7#?3Lp@pPuG?U?!>ATYkD&^Eb;v z=Zc8|Xe;)hI15Abd?fBdQ1)ir33l8(LO}Bf0gVx+8XX)ZJi@*_!d%fJfW=ag)Ne90 zV*xZgLBl!`4%RB%7chINgN)LS2IPSyvy~3Q=hO8Ppe4T>w4?Z~bj)$s0bZgHC zTTj1BEH;L?N=mypQgGO`fyQHw58#Eo4cvc?H>pWe8Ub2mDZiV=>SzYHz`<~fnj3d# zP}T!$X7qqtMp)TJUKj-7LT?@n}?Gl@hDC&$93JpvsTOu=)~5z6d#os ziPkhOkG2*XB-0uryM-tKi*q-)PHS+T%Gw-njIF+&Po&~D>?u+wl9{ABk&4@RD?%-SlK$k__3v+OBrNH6i9UdQE>x-|O2t#5z5#rrhN!8S49z_)jmRfFn5>s1bg~Tl03lbsJHT+NO8OQiX{B z)Byjfk!Uhn%hhyiD7%_&{kLJA1O@_&W#>f=WL(*%i2`(6C$yko5ByDLryH2r9#0Sh z=FDB!gH|x;2Yl|@!JMK7pPrN?IwJH9YE6eJxys^Nz z@*2Mg-nhVkW7q)C~l~jTs#IozH zB1~m_vsUUM8=<2~(>vj1z-Zz!OOoYi{710K2YadjM%v27{yqVIWm(o96M6-po4HN_ zTI$@z{yY3W#oa8*E9kEQ-GOQId*xQ8H zs1w!*Zx7L~KL{!gbQ2GizqwYc8=$`BS5O{-)3|@-Gw?^{SGY}wry5w6Ko`|qhw+|p zd*I1N%k>_hvC4E_{7w9y20@d>p?1P< zV~O)3&oa~8bF=*&FqW0zZs%C)cQ)V0_}k!p3HF)8lML*)Gxl7qzDQQsEiad86=|yh z^Q(uvQ;-rEORiQ^%rSSaX=g!NG<7<71i;9|mz`qt9tq4IP3{)6yF ztI@9&Nxp6LxhL@btLzDpexO`EF5s=O{6I?Y=QGFik(OYtn2dR0&q1eG9u1-es?9NH zU}Rc+#&ce2G&OB;e3#$1vS@MDbDM+*eT&dEr-fP#i`yz~fJfjWZYxJn1dD;WEk>YD zJ?t}Rd%A)F)qA+QDvXwusQfkTt#bbg=qz5}QQCFr1CozaHMtsiJZwYKWTYiIN}C2t zQqEiHn9}Q2*}}6%gn%DNJW4y0c!(2AJZrRt*65XfCh@eh@N5u|!5>zE`C96k61rar zJ4<5nTbMRzmq1lOfezKrl%z>0a2@A9UfsLo-bH0bQ*+!(*8@fMb-bq3V3_td`HnDd 
z=+8>V?Zley<23~yUb`G#yM9JXeKGr2@Lk^D-x_~lbnIdX-lczprf89-#kl_CHb*IqB~u0UseNDWsAqSvCmT{z{%vcN3aMXYv|L zrKxTHy2MTXM~S;oJNTb)vuq~Z(EA77j9HDNhjE`1#eWhrc;N)$^_PM=gN*i$l}t6@ z;P(&vZ{U3o+~sV8$623joMZSpc6hw6W_(TXOcly~l_M{Lx1M(#^jIacnHEf2Io_f% zN;grXtdBd75#a%rM&{ZMGcE#ql5vNApNo8IJ>+|B7r(D>bh5)^R;!uACmEgeNdb6m zf_9nTa|Fp#N8+dRt?d&{)M{AgTgjCqM0%FE+k;=3zbXc}mSHr~2TT6y#=NG*+SPzg zX8B;1hA;gp&MnQfZ_IOB)qu|wJj|VIj zbxPzIqpdNnfqljv58w$N8F@S;o;BKY(&GU1h-~iM|8x-L= zx=H_$nAV99_(%Z-Ox4aH5Xw+;OteQNt_^65*wn;@2n@vwO3m1nxDi3?xpvS0;mXTaGiHBb&&v54D*AXviOw z^F?>ubKXzD=hDzqqb7B-jZaL)t$5CLPfgCbSLykK9{A3<#Wxv&b?}g-5B9Z6-nf7Iv|ycp?jBW$G#8hs8BF z7fV&y4K;u6^b7MGTqX8 zmA@AG9 zX!2ppOQKE&V|L$=nAy7t%$Q9t{NfmvVeo1)?_U#`jrEI4Ljjj_Y-Zly&xFP})o)tf zS_RFx-K0%O+-t>@0B+0TP*R9-yU6~LxYvq`<=|G(a@-^XCGH|^e?SIWxRqWbabGcU zLk9MA+%#$h8v~(HjpK#WWad(At!NPx`kw zZ+NES*CC6E#=2WfRlm%a{vdb;=w^g+b5*XNj35=P&|4GU72+F@JRNmYn?(6iJoDhrbtqz&ikp?TB%x_O5_h4tGyrYkR@Ny)^Qr)vM+j()FafJl zfk$|nN0=)*2C#4-iAJd6?R;KbN3idRE5Nfo%A$#Nx`Ma^Y2YRYSp(ZK)#M<2ieUjJ z!Q0#lIhX%7#zg}l4U1zDN)`Ek6XSB|1H~ut0d=JRLwT|{(_XY?qLxenjbz4MB9q!Q z`J(j!peuLqBDxX>Vd*^+bPe8-5{6E^gelaX576G)V`~30v6h*4A#ViSt}m5_L8!X^Nmd04+|McFzovnXnZS2?!22v zbHNZJ3XeOaj;UHwy4kOVTLc?(2ZgaAD_}l4g`YD*$at8s#m1 z571FOQ;B_T2q+IK<=xP0mlHyH4{Scln z<))BT(L69;wuoq3Dz3cTXy|OtzxRzvwMxcM_{Bxg9CDe@ehn{=XJT! znCdVvI>0jJD;mF;d&YXt-qYTWuax&x?E|)J>CD+6aps2kV#xbxdPj)|wByl8S)41j z1mI{$;fxrQJyxNoy-5>L_%7OE)>9@`yJ8X8A^|*>%TK1S>BG2#(UbS3t51^#}syu)%7>!YB zM|1`Z&nueg3_AherFqeJ!664IxmDS;8}7#)v!@yR=O*?(61$3|Vznd;$!CIrKKHn_ z;+Aygs4KxDdT^T08em!9?C&=$95fP%qbQ$8f?rXwqq@eOHtn4}>WRkp_$F)hJO^m? 
zG>0KKuZ-H^b&Ut?IN;gph&p^zd*QPyHH>1&Crc7o`iQc(q=rH3E1^dr9?0u!^Cn3c zT3-pX23|3qk&ojn@&!*D#{2DAze%rF8TLmuE{nPk!fEAq08K|G!;iy>p^W2p$&UGYUu81tS zZZI$UnD<>F>j377v$(19SxDWEb|C3?*nz#R9cZ0#wFelM^+UjtMbVqI!w%phpYT5a zkbf^%bOLS7-?MnV;u8$bTvbA^6?MviR@$z_O?#W5u|tPiJJiCgct=&7$Zy>BVW-#P z(QT!k4E`2)+uBXJqPOs=*=pp`zV!pM^B49IBA$xVwAGk_&WCMbW4C6-GrYesd2k_nG*>LYZ$41s8P1;F3c!DY-|scy?_v0L{%d+`_v66- z7E^1RKg@p#nha~NmiHAq7ydESETl_L#dCun+D&uAXm$hpcz#Z|o{^D(&oN7U-V2|P z;b+X>g#U$~F81XSF| zRNN}Y!S7-2IrgQJZ&GQzk0a*sy&K*Yj^kpjMIHZX11KTTu-;Q^{;~pGqVkQEEXLY9PjelzYEN6ZCfpCG3R6j$>PhkGLsH2Fk8Q<@9O(d^|s6KRCAYGxh*@S}WsWXM5*8T2)l% z#haaxh>cl4NDr)CC~q#iw3G?OxjJ}2`z>zofu;$z8;Vep%x_UuWGf>=LE)WANpWc1 z6qiIrARhMJIz*aqWTdB|8RGKiAF91v)PK^L^Lbk1Hq8>^D%O2)c>5A#+f=dit?G^X zwsqBU-t%%dW4+$lmuV#5o97-}VaRGx;LCS}i#_*@c0X1Dd*)qGJ{^0APZC+H{?`6W zpa@f$--F7;rTWrarW>y+=mrgKt7w5$m9k!0J@T8hmb5+u6YoDrFpv_8U(SKYFv|5C zVBVK7dSCy36GlbN=yb~6EP2i_s*}JN_TYU6`zE*ZcXNfU^1+Jyv4)40;|LbA1v1dB zBh+?BJ&QLLM5uziqbhK)dmW)wxXh0KLd)0fwnhuoQo+{1&Re4jry5l$TT@v-y=Dd{ zFj8ZyG<9=@lS)IWA0oH2gm7~Fy4h$p{^Mg;k9g(O6Gx^k_bvr3%5b26ggna4xJ_E;<;7s}pGDD=3d(L-39FICtbML$A@kl+i3$ zwn|6&PrQ~_kw=78#E5vc`C0N_*eA|k`8UqU-@rkz>V%lv(BdhXif`Y}bLI^Gyeua!!+B*j0rL}kah2kH5bj{Nzj5A<*|%lVXef zKb!kq&7mw$qV5zr<9LX95a(SYllOn4V@n9}7jOK3=-5JScw|{$n(V7V9b0aF@+A*< zZ29=FQ6J1V8@8-{_0=u~qo`p^?He^$Uzx;9 z%(UU`aL|VRV^8t>b%1N(x&#_smU~dU2$^n5Bg^NnEH{oiGj+Q2x0m2MlyT&ytRV0; z?4R-~4g4>l`%?pkdqDlFGlAuLWqzf}Yk(d5V@| zb!YHAx|q=TWi<~LF0@|CQ&iP>&iM`ajnt|=+D6ewb6`FLjprOkv5UFN0ps5Y-HP5b zH)I(HZpc*nclC$Z>)@4Ct$0R4i!p&{oy9AaXPM6*j~wgAxc^;clH^=;DkbM?tDOp* zN=xfgxFp%tM8a0V9W z+}AaX-}f@QItPLqq-$T&{?WULBL7anKWcPytaWz6`(MoW4g9O}$n6~p&4 zy4tJ2@V+a*=d;1|z^TIbZ-rL;7Sm%tKbMi;6bp1;WvTZC&v%XZEFksRuAx1cVxA>n zcWW$Fem0?T&#T&lmKCI+%l;NiXb-GHvmWPvt9k}=l6Z(;z^Wp?VAkl$%A1=j?_vN{ zjcGESX~5BA=H#>ZS?>Kr;FEiQ4QK3|oY0CrD~>Fs3H*y@ZghFM`{)?xN_BsRUDU%I z;dv29Yk9BHo&xs!!KgGoyPV@I&+aHF_*T8cO=nl0#^-@sp2kty z^gy*8+wycC)M;egT$u~pJiej~N{RNVx0yM&0 zdg8en(G$6@^vs`o1n7>>`S%k=GX>{5jrV{fN=%Bx+X8(19S>zU!j5HV4Yibh-2Zgh 
zahg{J{sV#@Nw`@DWx7cnbd>h4%uIt0syMuq)-M5LUz`ub(Asf66(AHBmzpfCEwC^p zK4c2XZ_?hyH|aOIf=xoVlF;Cs8^m4UvvzJor!)BsIwi1Yl(6jOEC%|0z-!xq4gOP% zz|48VOkM$xdy!Y^8*>b$JccKr4;8Y^_)CuAezL&!g2P_&P;cAsLNqNytLLP>O1ttP zf9oZD-i0Nif%h3&ffLe{?zQna*L9Ez5} z>CACwE1ZB|g*x@J^}6-Wlw{d_J)UJ2{zlqy(Ne)%8dBt$k~j%p8FV@7?qlI^gc=~H zg13^kfd5WAen>J&2fZ=4-p%8$VCB-$_}jX-S@;`k3(6UP-7~m2j`xn_swXY{O|ZYK zsqt9!0e;Gy!@iI-Ut-~JqRsbztKjY9uW#a~b>|E&;@TOA(4(^g)|1y?$qwZPuUjIaSgclxJ!EkL#r@b-7+-ugLY{>R0|2PV-Ic zRnJ_n-Nr{w@`EaG?-u8)8uh&GWt?EEmSrt?$`&94&ghZFpAo>{+C%$j_-?Dy9#oG@9<~I-RMfl_a z{j#IG+>3}*o0U@9i}>V-dnjNEOqg47;;zT@@2Ur+-x;#DLpek!cVk}k|^@bt>wu89?EsSZ_niI}`yu%qMcKDLO9W*T`!(=PPMWdtBmgqRsGIyWAQ- z=wosK-T|KG62C?g6`f$PR$FK%+_bW)7N?hi{lU8&zrx>|%`3dciG8BJW&N7+C(M9F zvx$G}1%}SmJ)*gSw(LZeUqX8MYPR@k=aYB$>A;9A+{*7>7I$0GuN>Se&Qlh5TegzQ zdhIey$s%#T3L43AzwC+@y>1$}8i~YBGDD90W#@eC`m(rHd`jX@v2YiQc+pNlTQsR_ zj)ZY@=MIUx7zEVEjJ&iYTg9q_hVBz=Aa`%_D;0CB; z!28DL`yUzp1!&dBVgGX#1jD!I_p*k9v}(X(gu1?l&h_(S%Vt8c8b*LLY$bSG1$ zwz0}l%oMmr6o3a!Mg?90{EK{&*gDp`IMNvY3BVUJ{9h7Yufy;606v4^$<_uQI+>v4 zI>5JJctjWY{WN}G(`e&b=z0s|?r7rq#J>TKnQKOGXE&1D$itB1`OSX-cviwHr#>HU zstJeo>%$7=!`_YqXBzF*WY2BY?A-EbQh1o_dnm{l7ac}TSmHA6oeaIYdWj$!( zEBlhtMe>0LDO3PtyH7#(YvB8HoN4Gf^Zs&b*+{B{QZH(Yx(Qe=_`C+iTlJgBTQV&5oZS*bC9ER1^%EA%s(C- z8TcoI<{z{{6)_l0rD1s^C1oNrz1fj;lJUn5x{$U^qgPy=hW^u@;6A(PZix=NXz_}> zsbU4)f&F%1#F_ewgIU7UFKH6xRM(3;Obn2+*FH9c~9tHJY0@1h(@j|t};5QCOE zG1$}D_@nH(2NTAA#uZ*@rj9@9(ocBi)6k?#VDlC6=iwk>fs+o9%+tzM`ldR z7`^X~kug*8mwohJr-V_%_eSt@x#O?>vB)kkP8cT&MBihFjn@i9O;Ph%(SlUARa^>AhV=VM&Z&U;cd_YXmx4L#Y z;2KOdim^K(3ZE_=~D0u2Z9fi0rTRxg@b zrd}oc$xvLYg9(ErB;5ciZdTwYfXEVp{35oM+r0-2pS%B{RDXFb$`X#7;jB@%R31VbEdSP7h>$K zJ@o(&uMBALigQ4~LvD>7J|(lx8}=~#F_!@ke+_umDmQ;c7FVqS50 z0{D4%)A(IyaKdP(_z~e3#jdY`UwUO;(a-pQSL0`RIvN(d%%1dQ0Y1wd&r1Q1o5c}e z&zqep*q-O{!_wyQ-w_zUMSb`S=r%r3ZU*SH&qMU5nGk&*E{FapXG*Hb@#93`eqN%_ zBNE=RFo55pPqgI$gk6FTv4p>|iGOtfzv|Fpi*3^xInL|b(}d3otgjXOvts84^jzmf z&Ye9q5E?9UbK&X7H^{q7S+D8P+3Ecj=X31AS^-?iZ z;jm>Xl4N#p1D*{g{-$<#xUAIRMvXgrjwm9E_?@w?&}|4QQ{v!fns 
z=Dr@QzG5V&fyuPD|HhEN+YI0KZ>~_cPcx*u|Xr zX$5cZZv$(vBEM&fFnm{YJWug^>DfAqIr2%+?}GQ>F?DjRlYoPT|L-jM|G2h0v8hwU9n@O}VyFsTpA+SW@1?_c3vmt-8b_YwjA>-=7xvJXKUK4qZI zMbVw2EAXsTv3z&cK#>nP^3a6mSggGj@DW%PQT8#;jbpouaP|T;?BnKEZMtP2t9++w zpvMQiyMdYa%9~7&bnI@*FlHS5z7fA~jy>@w!#6ee#07re()h^r2E+F@-?!)Yy?~!` z)VT)$<9VImcL)B>4ByZE-9Gf*KxU)sPS+`%{VW`Fj~ETXPFw+U*V)7c4*}JX0?$_e zyT(`G%5yQ-?_BX3s^^;Vd*dzY4C5~X{FAO&EnQqN-+yMD#ru~*kwLB)ygzTgzrc7( zQQLJNzyB&s;%~t30pz@w-=8+&bLqW?yi4a;S2XZkG2#0gXVf@RN#v$+;u(Gy#~FT> zIra36sz!VcZiUYwyB(j@6` z9#K#hvotAhUwIl?C$3pEcw}p|w>9h6*b1g!Q`xtGzG4drMd_v^zV3)G0cn7{kLcoJz6m=k)zMSZnvmfONq&h`;0!N{1o2pY(@=CS1yZ=`gI3`IL z`ZaNmYDY1uNSt8G4Nlj`vINW3xA1$uU5hxBo-5x|#|psTZ`@|D1cg3Y$GCv^kRz!4 z1HAZ{;oAyy9x3O2u8tlf>OFI22|tV~1@WG##CX8zAyM0E>-ar&8@J~IuNSZ@0Z*N} z?YY3~wGa6HZev7HHpY5HMP)X|prCAw^&*P1F{sl+6L2Oa$=8c`#vFYdblD{S(=D=lZ&44KhPLX;xUjk^xWV2|8nhh(Om4+ z+v*?dmmDdM_Kt~;)sB6R4;@#WVa@^01(s-J(jery z&{mdu(iMPVC{hI&stDjQDo(Q{&%Fh)$@Ruqxrl#O8^` zNuf#iCcTy%p4>J0spL;nQd0(|tV}tRa;4&(6~|XBsZ^oT#7h6Fe0SykRXkN1RXJO= zd)3FPPOo-nwT{&mR6CiPn)+Po|6=aHiLQ zZz4^)AX0)L0wTpq6+}bK5?bfyX)}B~9s!qo`^XlAx zrq?qE>dvm~tk<#LZ}n@|-&o&hP_4n(274R0p6&MR_UD>D7t_$Q;napV8okwAZ`eGr`R?Y97H_vW`9h-?wzqt`W$%`uFLrxz$E(}+&nJJ;+y zp!25AH@ZC2Wqy|wzovfM{h#;$G(Zbj5U@SqVb`f$GrK+0ZC1D7?jGIycVEIcd%fGpCK$QVQ2W%TyVc@*C+PwA0pap}igNFBzO1kVYTC)AtJd_wyPD<-%m&YtL)G2pgzTjsN@&$8c^*Iqtl`MpnG`y}v_$Q314v{~`t ziff-*KlS@`@~1aezO?d-m5HmGubQyx=&GdE^;QpDJ!AE+Yih3$#w149a?v1eV6s0uD`Ou zYeT;cpKnOm*kWUcjh}APHZ9wf`c3_B_H2G?^Nh_`w~X2{dCTc|K$C%_Al7~@%~l& zzuv!j{}20j??1Bt)Pd#)mK`{7u-d_Q4_-VJaA@OU{qVTM7mfrRdF@E-Z*_iK{aez} z0Y}dstISgC*s5dqjt@J&HMm@GgWwLqZv;;Yem{6k@TTA$!6$;R1!o7}KT+;PgA*N2 z^gJ>0#QP^!pZMX#@e^Sul26<_>2t}!pB#R2*2$G8*Pq;b^7zTHlgTIVo$@+W z{Z!*q?N7aaYQ(A8r&gWXe(Kn%E2ol9{eHUa>AI(%Ki%zgztiJS&po~J^p-Pa&jg)0 zb>_;M%n&7{LP*_^Rw3O&-U@j;WM0V1kS!s5LqbDtoPF(V>{-`2^<33+P0w{Y*YDhz zbF(>b$-$L_2>7TKXyL!eA4;c^97+L zLu-XL4eb!xEA;Kq*`bR=SBL%(dN?#9G$r&-m^G|QSi`XPVZFmfhP@j$Cu~jF*0952 z5n=YQhZjm*cGdR=wEhVuy=; 
zFOInw7TzYjclg-w`QcxLZwWsbeknXH{K2IPm!7@U@lxMQ<1PhW+Ii{9rNm1)m$l1P zFE_p1_Hy6LLobiNyzuhc%fDPceEIU_w95}7JR@pFycp3fVpzm`5g$gZiwKH17ZDwi z5h)_eM=pq56ZvE0k;sdY36XhGdQ|nORZ-tZ9gVsal@#TO)}pILKOfyC`nBlM(H}&A z9K9)eNA&UNOVMf3IafTdRJ+pXN{1_bt_->I?v=nRUtQUH<<~30SFT=3zH;ws$*VQ4 zzHqhc)ty&QUcGiTz5 z-ipnP&5P?A*DLPLxKVL);y#R99`{w;H*x#pF2tq8{c*$lM*SNtZgjZu#*IlgX5N^0 zW7&-jH@>@ZCh`99ug1R~KPmpB_%-pH;&;UFk3Sh79)C4HAwDhs zK|+Ou>Isb#IwbT=cr#&G!q|lQ39Az}CTvUCo$y=2xrFG1n+f?hOW&+=^O>8?ZnnGW zck`8-18$DGIr-+Cn=5W^y1C=#zMB_sCf>}vc{kCTSShh);&X{FCU#4FD{)%l^2Du) zM-rnFGm_M#YDq1VdMAxadM{~N(#E8HNf(n6lX8>GBsWNYA-Q96-{jHBA0&U4yfgV! za$IswN{JNTlr|~7QzoP=Nm-k+Gvz?asgz48aVa^sv|H70wYb&u)~H(_-1_|1u3M*X z-MsZjs&{Jr)b^>br%p^=l)653PwKJM(A2A`$*Fm1mb4maEz)|VjYylFwkmCV+Of3I zw5w^!X<2E8t(>iat-b9H+eq8HwmG&Xw$E(e*ml|u*pA!l*;m>xrI$^gl^%b)!R^_% zuV%E&csb*>jDZ;=GR9?0&3Hd!VaBqIH5uzNc4ZvPxRQ~XsbyBrY?0YBb5!OBnV)Cw z$~>KUBQwuY%2C--$I;aBlB27mpJSL~f@79rrDKcZC&wYj8ApU8-jVLOlO?jMWHrqS z$m*LlG;4g;ds!c5t<3s1>rmFEthg*&R&G{7cFF8Y*|oEqWVg)@$nKjxG<$sZd)f1| zmt?QZ{wn*M?CsgVWFN>ro_#j^Qg%%C&1_qCZuUc`r?Z-~w(~jXi_Z4W*PRob)14nU z7dbz1ZguW-?sXn@9kGTJpYM72)&9!G6Y4fAT3FRFS<4N{zOm1`oyt4iNn_kA8- zEm6%=>gy#?EX|*`LSU|T|CC&kk+>w*sjuMu_k(y{pDdnZV@G2M&#wfpt1&j>Z>6M9_ zB)-(^in-cW(cj}WF_kCyYU*}z$kJRavb-z~G5R>92eLm~tg-A6e9Bxz>tjV%OKIX; zi9=Qg$9@6h#XLRB4By9wwqHLc2I%L+3zp7etL_xFJQ|6Y^)%5+4--|eIlrZ3itn`g zq8Xvk`iob!8A4IJi?MolF`7JX&})gI`ZO_7uPxqEw~G$?6t+i;jl2Q>OC{P^t7V$o zJ3T6kuk0uDQM{-9 zBnEhN7v=P3rkCD4{kS-&2Z^t(XQ=xhsFxL@8)*&X+4If%7hWi|Q{kt-FJ{0e#|S%E4v7JlmEr^VXtuskOok`+Xc?jr`5mMW zgq}{(o$YV*FU4~8ZE*m;dS1(-4o-^}W_(w!nZ~)2PrHf@mbqdoeE7a~tyrdiEk4qF z7?IWmN$*pSZX!}o6@$sw2+FfV8%BA*5-(Wm zz{6!|Ukk)G+Rk^Bt&t^G>@fFHfU;Q^ z5)KeU^wEZm{R!3>@r*Thu}SZ0-14X=qAYo0yB;p;>a9gp{Gw8= zRuO9bNNlqFAinYZUPO9~hc{}98Cs$kD(gdE0`Fvr8d{FKexT_<4{y>OMcE3|#_s%~ z-juv1^VKo&;5)E@I-UW35aPZMXkghZj_RM2FY>`Sc^|T8gFceF45aS95r?%u#Wx;( zMP+MCQN=Qlx}O70(#5ZOU$NEtp6FxgDq2{6L|!Zt+dRF*e(K~K&jwE*$(s#$TW#hMOE!%QO=@qp90Ygp6hAtAYQR1QV)H|AK%i@ 
z{ist_Oz}7>I$NiR79JPGG3!R;OO9BoR}nKQgRiB!=mKx7hUN|6g+}TDF-*tm;K|H# zQK%lqc7R<0nJuVCVZIY_tGkH zi&5%0;ivWy&%%RU^be6UUy08&-o9t?#V7AAG05YR7^IFs=8X^|w68>aWX((3P-yy@ z7;dc(zkMPOdkz)*trgg=M!iiI{VelD9W9+URvp@P65}nkp?M1O=&%?h>qU2o3CwqV zX{{wjS>r_@@@O{lZi2psb32P)sPhi`YVo~3RCM&5LBHf-EF*7Y%)X?u@Yn0nhD(Z8 z$cZ25%lN{lc%L@if%<03#gLSSLBkAGUL=5n3DF&;zkfXPFmg1IpQy0iV zLHiSwT~H<=)_`_E#=iwNgK@~}o_Zbel-@=>N12=JU1*;lQ)j-SA@$Y+UR|s2Ab%aj zdctvfl2~ua7wfgH><=KkAYM}9D0e*jd&O)`hVnYAc$RaX()O?~)0Ec|YM+a)`WdcE z5-(YX5$6JS5r{0Qt|ZdMCC$ksDQMyDi0Q@UM*9qVHzEqVTbl zVgD7;#adacw+=VY)h(-uYXN46Vb*Sh?~2}f4dG>dlWRtE?ayM1?bNKXT{grX0aR@au6OItj#pm=q1D? z?R@|ZwGYK|Z62~-_P_8cm_Dc?phMTs;%MNeo4-UT(lbTARL0dv79FixMMcq_@q3gs%G zKUK~f_ho37;~s7CzGw}-H|UMTaOD0leHQo-etVC&d2FMTkj_NPGewVsU$zR}T33`c z`9{(@9$tsX==+7#&n$zG8-O%~)QQy3L|IeMlDgJEKK-Ad|LMKEx#X_@O-0xpQ^iA5yM6=8?KeFuqA%sWP$a?tK5Pg-lvJrZnr5qawlL+?{<`ABR zp7oUr#sk7&>rxS|eS^-0ZY%X9sozQ+6aEiEKiVR7UBa<4MAq&!!ym*>Sr3vve+~69 zMx?0|L$AMu=*Ln=m3lJa(86$$sWYRu{@2jWLzN48$sHaMm1Vuly7*s14|H;=7fT+I zbyuXDOZ`~t=*2^+r_v9+Zk9#rq=Cj26H>;~_Wu>g2PsF2Ly0qPBX748#(~qat(dY^ zww>aTGQ2pXj4ysKDSL_oc~ty9McY!QKLKPQ_uUGPf^hOu3T%-5fG+!kt&FGW8;W4& zPv-Hj`IP-i5oCW+9Fi_YC|-tQ+h$)S%OoNDup-EI^QqC^vcu>JuMV=zXMUoI(E^>Q zwEDL3o!PD!qnYi4dMX;1NuK@dcuew$S+9(}NXu?+OI;lOTk1@v-i>Z;jwxi@mSY#j zOVoWc?sY-x-DaC7H1%%TCeisNkJ4_U%{1kBkiJ3QU+UJ6@+Z&ZeA>oQGhehvQ&!3G zK^EIm&dL1AaiNqGvaL&2Aj5ikEWEaGi* z{LHvo+sL?u_AT`+nJ07nSR|(k^IDV-bL_%+qfmz~;n_{RRiYK++*9Na{l7?#nfG8^ zBk3W>8ghIfxBn{tirP?-Tr>4k#$$3!R&?E?w4s@#g`}Ms%JH?7C53wZqmXfm97{;L zOFH~5WPGE`F$m)ts~mgCF?CUS|2eGjUylDxuK(?P$s_+kZc~0gj(Zi+;jbaIJ464o zj_@#b{psK2XAv#jVa0zvUNrvwZ{c6%X5o0bXx#j7VG(crEnE$a;cM!nfgE2-`pY&W z`PW)BhM`~5ZN@_r<3&S`2}?60PgjlnB7TzXLbgM*tpuIZl>k#>y|v~+Y0+I^S;=Jq|A|aA{iF7;i5js5>&9r zl>5&Z%Pm!m&1xlMm;S5qKp$qbV{BI6JcbS`<-I9GP5F*Kk1UsZ{v)~Fk!>lH3&#p( z-@~@qPe>gf`7Or*GQOxUkox(f^ra3f?*YtoxsTajNgZFNE&C#wKI0Hm@0Q199QQEw zae1xO_Zc%VcHLt|XEd+pzOpYUf|-6*y_Gdfqx0U zt1ZV+kAIBPHGQ(uNLb*i^_CWlJ!`o`EmFLVokvKxe@RTG_ZB|#SvwyY)3sNxuA-w5 
z#zU*%VTQLo)~g&)Hp^o?Hz&VXxCQIZjus;ZjB!i!xrO}D5T?<0HkQ~C6y zg;$;PvskQFKKWrWcg#Ic)+%rQW3o4~uHf(>TaI zb=||mL-!<29Dn3bo~d#NPt~efiRK!9GDn)sF+b8L8}gW$BXY{0)Xd9?^Wat<){-Up zSe6F~v$k42Jw3>?%&bm6__5O6C(1Pup4{HkL;eAmds=jE0JzeF>)iKd!%Rb$aaQPM zwUqEE!9`X{AekjL3m5Y;QsIu~CMU{^WTrJs2}=nL!m6sG2u-tkKnth=Q7oEnRZCC{ z)Po0C$fRYV^b)c-@=j#7q^uI?q{)y|JVXgDaaVzPL`D~FKyOHGQc0IYV$aNkrzD4& zUs)*1%&walgvCrU*5@mB^S#JdGV{v}r zc^aHnoOYa0bc*{7s7`jtHO)X0?eWj#O>Xcw=FyfsLS{tX*y`StEaIjG$+<6=g=LEx zdz`Z5K(kC-XI2~}BTg1qo(9Eax?EY5n3N%;W7Yr3>_`VM`Menh{dU(=KIe2Zf7wp6I8)a+36qnbH2^J@N4>s6l; zKBauh`8?%Q)2FsieV=ANEqvPgwDa-vnc=g*XPeJXpPzj8_#E*$=4fXN`2Vp=o6(LR9NbA#Y#PzQY(~Nq0|p3 zHMMWtHBy!9#wq2Zagr`G7#slyz&@}SFt?)oWK=Rd3`@bif=ne_602ZLfuB<6;q`~t z9)>>L_wdJuKRn#}aMMF^yJAxBu`A9kUTDV#H}6418;qs+&kGfnI{{Q zuv{f&CE1eFlD~kZl!oiZi zTYpneE9XppmzjI~OJ$Q1$@kwW&5J>{gJM5rhQcWKpMQ+i_;Ch)T*{B`bCeS>6w|EI zwyyRCBf(0*lbGspa0#Ti&wX;OMm@Rj-cwI;KKQ@<)CFpwx={U4U8F7+3)K(QRqASW zjry7Tx%!2`ZmE8$ex-h`u2t8G#o{A%y}Ci&sBTifQ8(kM{IR-I-KGAd{;d9@?iNeM zGBrruqwZCIRrg^jox{7?R;Y8ur|K;=RZUZEs$ESNE7jX-hMK85)GReyb*e6&U0ki^ zsJUvMdPmJy@2bD^X06ZEd+HzRef3ZEf%;H^}I_MJGReXng7A=)-1+&OVxgo-fUlYUYAL4=D- zTB`Q5h|+$+>b4uHwMW}4u86B5M%$;QY5T=B?SQzh9n=nqSnV+HS-BzNY2m+$1fH}! 
zsvXmgYr)zH?WDLV617v>X^|l^d6#mQb_QN^3YQilazw7k6L++;B40bFofmhtQ1Lr5 z!lqqNRP7?7GaNa2S&LA3?W$r$cG$HjzJKGXl+YJy>Dq0jvQkBVZllrLoe4FKai`Khzf?wVLB3`+}COIhB^YC$W{%T3@Vxq`9;leU?63%jL5`lwL}2h4-f-?fWaQBctaiZzyjn19Sx`hRP~c0Ntt# zRIs}0bM@+a4P}rrSQ(-WMXD`RhUri1%azH>6lJ0Ep}s=@MEP2OPhr+JNHpphRHFBQ zJ&Lk&;338MD9G<=z79KVXyYKIk&lo6IKLoec;g_oQIPVSZ{r}XkxxL7RyUyUfI0(x zmisL4K6<%NfX_Q4MhEG2&5#3QmJe*|6D0Z#7{|K*0N(MC4`ds56Pj?Fa1_X7i9wY`XUoO+@H=vGhP~h_A)t6I(qWu$3 z?jKUbUn4pekAMRHhm^qHBt=lix4Mj|<6Fm<91iqr98{uF&wc~^$)Yb=EZHdNIse8% zr5Xh_Bq-fzPkm*n&+>i)4tEr~7Q@KH{wbdm0MA>W47_r27$Hx?vC=6nmA@YC~mj8HM%1GHshZTeH0IY{;w7CJpp& z{xq8h)%jz4w>G!5_lEa;(cID(jef1Ixn=3v)_uuA>zY}E+t!g+cs#n+vrp$2>Q@8c?gN(WBV{cG-W%}OU<&_zG1LbN;d1dC_ z4%LVoCRYpOs%bS2^pdN|aEmrbrIcjcB-u4>if zpYsKRAV%rvc0u~pZBmKR17H)fpj^b3|b_r3MZ%?`J zRZ3J;7|EOWEsYmI?ZWLcVzAP!aJ#JNuWT#a=3R=)y~6Esq9miNE@HBn!q>y6;fFpJ z>dZg`YmRUJ^DI6ByCKV1-YqbOz3xzKlxV_MCt@ZN_I-5KbaQWvn9iBgDCZcSTx!B` zo-zV6fRA|P(b;`DK32>mjS-yvhI!3&j!xzd^8QV@dkfxe@RGSFbJOaHv>HE=R`Gj` zBL_aD>cc&!5soJ9N#m@9TSjM8DvwX#REl&_B zVjGJhenjWj&?b0$11;${*1;@_ILV@j(<}-)g47=`uqaYPU>(8J5{igsQP2TyvQB1E zL>i02*x@$oOz|h{hb#&s2}LLh!y?x7qA|k$$p$|dTrJ( zDlf8rNomKrlj6s^tI~~iPvuqCeUv_|`}2-6MfSX`M=B#(k5)#reuu9UE4&F|BI|dR zcUezSrm#MOmai$lDJO-hL@736GL#I~c}gDZd?lat1Eqjx-Z+6ZRxZ{ast4;5YAM#` zc)CkbE2&jjS5qm4+JJhXKTyfJ+E{JOx|!O7^)Pi5>p5tS}roIVdg|-?#!D}k74d{%*&IzKEpep74N(B&O*6UF{)Eor?98QQzQLUzrCFz z0wVn8KlP4(4gVT`NBylb42baWR@GYdivP-h!2xSKrF2;uaG+Wbzv2GlJFl*`+P{7E zv;OV<+xxBXAMYPn<9xuJr=RQGk+q-y_<)F~mv;{HQ%O^;S-Mt9pVB_rzN-V~_IB(vA***_O`Y3d$A;bFa@nrJFjm&t#ikYJz8&U`=0;!_ImsF9UF9P;6J{@j1C2I zt78MowSt__>DYrDhjm`xDW!8qzoT;TuOYYHWpu}P3hOvS(xX#Y*V3ITbsF8(+UZ0< zcEFvk)_^;N=F(|zz#T~^_u{{j^ouQ>pr3nj=h&Tk(b-*=`VB9>bYAUVB>jq(fH?tk zNO5=(Iz_k_Su>qi{}rU){_&m6I0x9U z_>Rk;VPjR%-2B_>-}6#L)#8^FFWuvKzODBE{Bu*@ec9tw;fvz*%l%)HmG7YY`;T|~ z{MVoRO7|U$-}f&UdOhAZzBH~q-utJmCwZ-St^d>6e~&Y4aP4il+T>%yZe0KSIsX=8 zoS?41D>`ZfL(S`~V~wrGLB7Q2Ft)(Q<&9m&StEsdFXKLwJJH%CEsVy%DI}j?EIhX5 
zpQ&4PkH3dT2H%J=e}>~9j&7noe{O6tzBTq3tBk|OAw3m~IMLfUYE&U+F7W<)UHwySKfXUSIPv$q$QVgo_gx>~`=8JJUt;+J1}%+smZUB8 zE|Oe@l>T4RaGwJg*v*toIR_Uc|Cej2K~qLCdN86ufo{@XD(Js(r`-_HB* z@rClqz>IG33A>>Bw@=*5w}Dt)*KtwoOE%UIh7X7p|n!k$TzMlT@-)b^4pVn zlL5?ve64I!wkqFa>G*)zt73C*%wsK4KUSBj%hcuSC+Z6IQ*~wWIk?C3Z|VW{ka|Qt zsvcKQsHfC3>RI)?8m3-UFR2k~lzK&tVTL(QjaP4~NoopnZfVT7-DajOi&?fDX4mqW zQ@hW6S^=}rJRQNKAD+ydm1gG5n_0DrS|zQDR!ys+)zo~n+S)T(U9GBRF=wq;x#!j7teTut)4tZ$%Goq+gE^1(v-V5TyqNYYHk|$10XZAS?EaA_XTX^K ziq@`b*R)t{IW8@iXJg9f6_{oEP+x+DM*5m=V+_P72wS#%mq{tc6!M!Q6^N@#eOt`> zE=7I6h^Ek*iBoQfPn87TdbUbQQj*1LB~!@~pDBi7h%eQ$s<-$GOI$OaS8ahk zZoTTS`il)}SG60S2kR%&{Fx-u;L#Kz=Y;J-^671n4zFg4 z+wg0a$be^^A``xSC>*Mx8sZO3qsRgI%$4{Po-HdLz_;GwA-r2&6u`4S!hmn<@>p4Y zt-hkeyU!{Xt*O?U8PYCVSH)XV)3zyXutjcH+G;zsoxBhW-d5VlIbWqcynR^d2uB}Nx?-a|qx8_u zYUh;RJi~HP>C00r(aIZe_f=&89DYq12$#nzZ)u5Isxp+BT$?gl&dw@h;Cz=dmYLQ+ zm3OoP%}}N>hpHR2@1eh`{G<=lhbc#R zR%E8~8}qfZl#_C{RXHW?p~@M3q5h!~!hG&x8G442qq8Wvyd^3h zOL8gE8qFvQ#2D9=Ho)Jwu9XGepgcHfT-RrTV9u#RyRJx!D^Gi?q*P@tY@Mh?jhEMI z6Fvhz0qemg5Ka75a1F$=pCKw@E3SmUO(mWouY{jKC2Yi%cwW7teg>Q+T}4zde30bc zMkU5}l|)_o?+=zj0G^MT_YMx`UpeAK*)p;tllTfs9t)LZ2D}z94QL*g)J~ z(%1)nWB(Y(&J%`$3ml6g-5Bt@k*R2)jPZt2g|M2DuGA!~4cah=#&_Nof5PE}Bftc( z#Hge!2P?rUkZGJyvWydI3$Va=Lk$EA!G~ZGSPVV_>y4Z02Cxxq0^fkmU<>%x$W*t2 z@4zV9y5^beBGA;QCiM+ko-JjOM_;3PN= zLcloy9n=dT99#yGAR1f+*FahUPk<2q2_71mwD*d%_KLLiirP~~hE^F=1=YdRpcbfO zq-)KM6B=~TUH~n@i=Y)~4PFJWfnH#V@rJe%d_&q>xt20(+qr%R+dJ9bMYy-%o<_NN zR->YJ+PJBmA^t3&9<@-;CI4C!VH`-|yjz@OBQBGD2VoA#WBVS*{@~bs!aoTOzRa!w z6_f^Lfj6jVr0bPH74S6sHH|m)TEHensYX9(*O)+4M>*nsd^!siGZ5;h`i zOxT35DPc3h=Lwq=wjg|guqC1VQf3?Q5_lPO0G&V=;17C&S3xh(2f*_>Jg>hA27*Cg z2p9%NfKf&UzGE5sSTGJu0F%IEFqJY*2Q$Ge+T1$w06*#*xo!*C#yL9(;W<5+Fl7UIV?rC!D(;Yyu}qBU-*;8(agi z#AO(%TCS0xPXW`w3^2>MsxKi7COl=N>Sw@N-l};)l+~j^24lUaDNPWi$)Y3?lq86f z1W}SKN)kj#g2;UYxep@uLF7J&oJWxJ2yzrbjv~lW1UU*K7ZKzli(EvIiwJTNK`tW5 zMG(1&AQwU8B8yyPVclqgWvPvkt$qo<0$+o*AkE0uDuPO&3aAEZfSO>hk*$veqrpk8 ze-(Yq8yW76-c^ygBzTqWwG!^lkrwQ21~&*o@@9P8M7692eyIl0WuZYl8$UiN4BIR 
zThfs&>ByFJWJ@}-B^}w4j%-OswxlCl(vdCcwE1+}ayso;w&8T`hVaJ!)muvhHufDL z2N>|50#r~IJPlfbHsB@jGUxz0fiA!w3V18O*+h68FipoRl#IG~0D zYB->V18O*+h68FipoRl#IG~0DYB->V18O*+h68FipoRl#IG}(73OJyE0}42xfCCCR zpa5_02dzLGKpTVt4k+M&0uCtPfC7Av5ex!DzzDDr>;StcR~7nA2mPFbzQsY`;-GJF z(6>0~TO8^i;7{<7J5ws;umgGPKt?){j}GLK19{{?9yyRl4&;#odE`JIIgmkoJ&ilS zm&Rgx@Wu3!i|Hj7(@QRR?WkxPNWF9?a9=%f@ zJxm@wM;^UK9=%4McEiZkcz?c@2r}8v!;7jSs06BjsbD&o2~L9$?$trmXWbCA04>2H z@Htoueg$V}mC@wJ6I2G(!PB4?@CBW~cyi?r0zfy=9V`K>!5Z)xAWiiPa1#l8R1} ziawHxE|Q8Kl1hISO@9?le-%w%6^&ky%6Qz{s7V{HNlC*gVK^lW7w2eCW$<`j!c%n1 z!Ah{os7VRKDPcGz45x(QlrWqUhEu|DN*7M)!YN%ir3$A+;glkr+=r9vaB>|^j>E}u zI5`d{$Km8SoE(Rf<8X2uPL9LLaX2{+C&%IBFr3lC8G3M4gwdnkMKd{qWRFL($0OO} zk?irJ54;LbBky7ae2t`!XPl6L)JR5ZBqR0Xk^1qB6B3a8@ko(mah#qtm>NFG`KK5s zoW=qcf?e_~*2nXl6AHqZb-2Ja7qMiAV;jGO<<-IYv@ekZ^1vOC4}Rx-+NPjwia$XC z?Op)Qh*vCxRg4&pSyApxnFj8sgf*NUgt zil^6#r`L+7*NUgtil^6#r`L*C!@*?`38KMOa1G>gAIhoT1;2woxW}L1p%KIQAc65g z0^@@O#s>+E4-yz3Bp`v4>FwgREy$y->~G`T?ZoY1dnenw2+wdFDXX1j{~RH7M`9<_ zGsbHd*^dNK?8kv*wr#}Q2{YMuu$=?)*fxxKdd_%V1;}B&Bq(jf(2K_F-h>s6WTbqu zUIoV(4|_>2>4jb>ry^W9WI~ z(Z>_e$N7337zZYRNnkRV%6+GUnP4vWUkf&J%@(fR#_=6&?*jXYKL}29UI;uom;R4; z4uGz3W;f6s^aQVif!K@Rg6qfy9PWa{U2wP)4tK%f@o=?+mg5CSXTZ@eINAkApMj&D zaI_tcb-|r3xH2BDbitJ_xX=aHx!^h%T<4;XjiZl^qmPZFkBy^`je`?iaH0!Nbis)( zIMD^iIpH`b9Os1NoN!z`92XDAx!^b#9Or`LTyUHVj&s3rE;!Bw$GPA*7aZq;<6Ll@ z3yyQaRZh6d30FDcDkog!gsYrzRXkkff}@;pQ#{<{gp-_bk_+x}!9DSCj|;Aehil@s z8%7*`ejI&%9NgoAbK>C~p122jY#Y2mO93h<3o23zl|U8nH2bYU8^AmM;Sv{I;(|+D zaES{palr{LIKc%cxZna8T;PNYoN$2?E^xvHPPo7c7dYX9c(@=QE{Lc0UDUjjns-t2 zE^0iU8jr_jZe_;16zG9G8py2iTg<2p0UOvpMtBOjC3{?SHEPp`+RUK_L#U}O)YKMg zY6~^Bg__zzt>jQEIn+uHwUR@vgitFXP&Wta=0M#XsG9?IbD(Yx)D3~MIZ!qP%H}}X z94HzBHAA3g2-MsHHMc;`El@H93Wh+j5GWP`#X=~34yDhb^f{D1htlUz`W#B1L+L{( zeF&uwq2xJ~ID`_1FrMiOFLndnK~L~H_yVj0zX8d^=Lti>?}imUa`X~c6h}Ouh`)g zJG^3tSM2bL9bU1+D|UFmPW{`de>>LoFj{FAb$i#?rL;wFpG5sC>|JFY#^~=j+IDHv zDqno`*Mqp8jCT5QwHyV$#Ti3D@sZ#$c{P3KS;C@GVHnpHjSQ<88Q8>2LDPR5A(m!@ 
zSeg-HX-0^p86lQdqD2`c23%+S&)lVQgB&L?nnt@CsrYPwhZ-W@C(=t_K8yJe(--BjnZ>vsDFZo(t;&Q zVZjQC#wxtwh|}tU2H-h> zo~YgBS{;OPE?y9fAcul%wAvSdA7G!Wa=9v(t8%$2m#cEQDwnHrnYnt4uN=!OhY&I{ zLi*k@Q>bR#DV_>a^$t??4$|`uDdv%49x2N4=N+Wy9W&)9!Z?r#@<7p@DjIi?o_CO* zcaV^GkdSwfkav)dcaV;E$Vndard9(909rE+X^S4(j-1HxNFQdc`-1*}5e&I?l3N=& zm3nQYh~oShz@5cSD3Od#VdHuS$Fe!sMVJHfz#Whe?lLETkMKVD6BHPc=&_OLu#rj$ zBMqC`75W>S;$z%GhmB)=f= z7(iX>!l{fT-BC!A(Ma-%oIlA3rB4f^PYa_@3!_g9qfZM{$uX9De04{YhLLW3)T)lM|gZ z5}h+DyV(jP>3(b;n3^+_si; z_k)8Vm}94S$JS}iJ3|;k-8X8d>8=P>16HYh|Cmg5tL#g3JYBGyjbW)4Vd>F^6 zwM1&^xbg~b+>*60$w-32PAKezqQ{}=aVUCRtHb_waFXp%!YB|2GC>|Ctq3ZCDqt#@ z4rYQ`#&L3boLoA|p%c4GYx3Eap06D(jNTI7%^{aIa%qEKbK%!)_%)pz+u+q4avcHh z+2J?YhI7cZja*-X$7FlXA@??TEFB(8ho@xw&Vi@w@KYB2WTQk8@Q$6**eH#S(%9e+ zJG^0sH|+3+ow0W=ykUnYvMHero{(}Ohq|{@$9C%1PTfj5kwe`|S&>7XW>cTp)MYmH zm`)wqsXse)XQ$LQ>Mfl*vr~E-^<<}x?0CG?MT4tP=^BC-pe1N+BtelRsF5fZvHv+( z3-)sEuY~&uqd1OrmAP+kD3!#M5*2w$!n?3#Cy6Yqz`5-I!5i#!tht_0qYPnX!m7ko zCq(WmwSX^ZLtH09e?r+pyAjG(+Jo>_!roll2MmBOM#3YbjhlRl$XglDH51u?7fMfN ze+pJ(EV60<=mw|-bpdgKU?KPrECN`YRjkeG5{|D1YrtoKvSRy5r-l;M0~|jDj)0@Y z9S0}CDG&je1yZko7;q2V2M_4`${I=3Y!Wq_M9n5rvx!<`;+J4)UQYN4`zzW0n(g&$ zZvxy4yHPrJqjYLKiJDH-Qi#i7JBv6M$R+L`arcF{UW(c*1H6E2wbhIy-lXfT`+(Zu z8Bh;kRneaVjX)F73^WHXfF4}?3V03l27N()@CFzFnBBw{m5wbc9b1&-s6;p_5spfP zqmoQXFo$pn`|C(=Gv^!v!Ni>cXW)yLqB?D;HSMM?vftfbxZs`~afN5CVhClQkihm$ zdKepRr7Ch*%JMdBOS$b&*c(Y8+@8gDH%Jrhyq?7Hw+@VK5=GnNr;m z<7qcug_6U@Sg9;+g?BI$w+~3^afLW3L1JkKacn1mo9MmCJl85EiJerkm_fY$r3@h)$#uM$nRZdO zP}10G6J===UbF!(+JF~rz>7BEMH}#<4S3N8yr7^BUiX66z2J4Pzepz8YuVskFDPn* zZ@r+Z4G;RVP?cT_9gl0>Qi`}(;!@bpAj}2mPw=c4JnN-v=u#EXyPg7-K~*CSzDF)(gJ%f^WUxTQB(53%>QDc4W^adnMT;Ny(O`&mkS`xbUG@ zp@fsYkPT1G*4WP4LG_ohF-Ea{h3y!^Skrcz2JfZ8d$&a{qYoXlfVyqaDY_8)6ZR%V zQ>S)|%Y5GfR*{2~sIR3MELU z^`ugQRI>zGgt_#iI%r|ejiykFR7#OTDN-p#CZ$NF6tV=VlzBut-GmDLX$jbk_S!l zph+I|knJ!NI^;nI+1BzXa~@^Rqs*C;7$kAyw`!2R~ z0R6qV1MxN=oUPekJji!*SLe8k%PAAq~~^_DLU!7ooI@`JtrET6AjOahUY}XbE3I9(bSx1YEF7*7h0MVEzOBG=0qEF 
z(mT88nO$gIPQ4_k0p^0W0DT6n$tmiH$1_X$Xi52KN%@rG4!M=HOZntbYC-wvp#L<> zl#gbVk2aK#Hk6Mhl#eEqk0z9lepq~#DPPOuJ$)5HB~S%S1&jjGit^Eh^3jI!dD=3I zew;B2@+eC@fNz+oM&@K8Z?ecm7VR~Qww8rQ^ds`;oyjOWlTmgiqwGvZ*_ql~fM+7K!GPx?7-eTN%Fbkzoykbr#Yj4n zQFJDwXcwbs`5cMMv10da_0YJ`TNh$Ydp3Jx}Vo@F&cI;8qU;< zEPgzP!)P~?(QYOq-AtZuwK7sI1+Xhp%R$)0g0P7N@mysYY&^VFgV~MO2_M^dg2X!Z ze*gr;Q!ZlAT$Sc1Atm8+Jj0>^3;?|-2%SlDUGe* zJFpFW5B_QPqsT^dkZTSDT7-Ixb9piY8(@%n8iasz06M7`KsdMzB0)5`3a)|TGadi` zXDu`IJIh%5x&}}mc(jME4%3w1pgUzUnc8hauoa43|1!2<+(xlz! z590p>22ZglKn3oZlptCLGb!i~kL^c6dM&ux7t{fDL4EKnXb2jErr>$d0<;9JNRKCO z^q0WPpabXxx&WTS(f{+TOAz+cAbkjM&$^Z0cMoVkr7|Dm7^advKH} z9)yA{p0muQN56}v@jLr}7{BvI0(qAzN=?}&L0j5rXTokA>w#9(hhxLp9|0zSNyOpx zu71lKpSObVz&7wbI0k~jNpKp3fOCL*s~12xxC|meG`I?`VNd)9oB`xh<2=m<93Tf6 zqO=YrbQP2Z)r{ZurwLnuHsB@jGUxz0fiA!w3|9bQra`_3g36P0j%Q)#B#X3jT}5jlS55oGry{ z?PBl|_!uk&%V=XaKs-p~98YTQ2>QZhw3{2$RBvjC58aZ-G3X4Fj91LG@<~f-5&5K* zPg?n;l}}nyQ^+T+eA3FtV&i^p-beK2sh566pm@_*Ee0Wds~CaGNn4ANv2G*-4 zMuO6eHjeE_=?Iwh;~sI!Fyojqj&moHH$)sUVhe^rajY=y8m)bQf6#1(c-%cYlZSlof_ifiihe zrn0n#a^$ZaE#fE2_7r7nN7>4f%kh-2JLM})PCF>C7UV$36UwjxCoSfATFfxYTaxlx z$aynpdKGVyl8kudGmmAB%jR>BJ(%TsgSmhKgz|aENlOkEA zNT5O3;?_g&L-566DC*{w^QL{l#_Z)?tkP|b&D202%JuJq9DUYjjpgHwOMJL0;v&;32Nb;2M`X zl4?tvaa(B}ZoW{;At|tO!fk_~!y$4Oh9_YLp5J$k!(6wQT8ZM?^W1L>Ih9Nb9{LlTLu?MQdC0t2{C={;Qe3-OuorqeLl^hoNpP zM`Jk}%TcN8OKQov+H=&roTtsA)}%y{*Lu+oN^@-{*Oo1;FSpdOLZ38fA-Ou0BMwrP zqqi)sTL>3phZAVT)I$c8NJM@lQC~?&D@K1pr>4)bmCIGRT*cddh~Z5>a+a4mUC-#m zif>pc76q-yn>guRs9}+N775!3@4pf}E!Unne>9+~Xg}TY3X}d|Pn(`#HBC=2`gxW* zVlZ!qddBntt8e;%@ii%Ic+WD9IVKtl`L?Gf=6jx+V$r*bSJ*X{=2-ON;D>lDdN1Jj zWy9Cd!P1_0Ic77X@HeZ6M- zef2T@zWSPeU;RwKFTB3+`@-uBzc1djh~L)$)9-5_eqYOZtJ+Gwi#W>k`x;|O`ooC z)2A!W^y#```gFycK3xf>PuES;rz;Vk#EK#bzr;!+8Q;VzA_f1%YT_0?iZw(keu_0k zn(6n&tcmuVuuESr-k*uD7k*;+df^WyeZ5rE*Gto+ua~Fk>s8A1^(t-pdX+bQy=t4j zUUf`guV+kOuezqMS3UVwV*J6RmsfMs%c~_`UKjBMlm1Djf`^z2$^dUpMwkJLvh zKkB3P(aH|f%WJ3U)4wax z^zTYC{kxJ)|E?6%zw4Ii--XOoW|-}>s@XoPn(foqY@fcg&$_hLdMq{Yz^qTN*?>i% 
ztv!qUZNgF#J)tRAHe;zm`{G?;w67QN?37-fWw8jh;heTCPvaH#628FgSfqCtnm(R z+)Lk34^u8E$b~(O4)(IBcn$wb-uAJmrlcrgN($YS6dtCe@G)gXc_hS5%ALyMgPgFj zmXe~HDJk#+WAQL0MJ*&nfvAPFFocIGFKU_cqLwKyROCe!w9Be^!9A^1<0+XE$c`qQ z*;HvNYAel2Ekt?c1?2@i$6De`TFR6urA(Pp3Te`Xqkj03Ry5^^f*cvlRYRCN zD6b5~m$ahkOIlS~qAU@ zX-2D+O|Mo3Ie3;_onzeUi3j$1wnG`edg6;6#`Xoqv7UHkUt~L+@vJBQ*_YV9%(&K5 zja6gGr<9TfdjEGiESZH~;N+KmGRGZyjA- zRsB|VeY@)Y+J6nd$M1pP>-WOjOxb2Zt2PT-wOP=rHp81goq|?v7_@3p(5gj2s}=>V zS`@TuQP8SIL94bwtMa|i3hf=ELgs^ag6}Myql4s70Z+79w(wm9T+!y*!XJ(nJ`gQ@ zG^07)rF+y-dZ3XT1&z%ANoeForjc=-DpR9&GEL5nI+-uFO{W{XSVs}QHI&( zSF*m}Rm|)(O?@r$>*P8tf-aL~xLz;UbH^2|>)e3(=fA?|N?FNV&zs~Xp7dt9nNYWY zO*Lj^=U>CGl2wtKmS+ieX7utz%;<%$mTF?Vi`ASv$lY>xRAh73iSu6Ytj^3_rxr1L z{eJGdM%LhGt*piW!;-E{_y~7gFYBWwvVmEG4Vlrtkvl%hs?M=I#)w4$t;J^U^0+)6 zCGrI07cH0{{3LhZB3qbM_!OfV1@g2!9kr%Kc^)|?&!aZhu0WYL41J9`ti|#=|5z2u z8;q8#tX~8_> zFNuwrCdmH+Cf8VY%1%=971*4YU9yXKc7xG*`5N1(G40kj#JP`AmI7KY*4SiiXNelK zzOyn*uYuC{(qb*fW(zZwp^de1lrW3GiP?u_Y_3q7f%0|M=8Vx5Y75Z5&e~F2!nXtM z^V%K@Qn8kR`guJ>4}tFh`scMHmSke>1PbV}2GAM)P|!fHhv{MPT|foB{y={Ke>mu% z*CX@@_^zOYUXRox;g13>^w<&T27ffDq1W!(9lnS5fImi$f$yn3;d^N>_}X5H1wUFx!;jH1@XY^)(o2Yntd{_t!z|}woy#a>q0VErbFt3n@3%r-z^vzDy@0>q z3at+T{(7-q9CcuA?n|Q9Vf{?i#k!c0l_k0aIsFO7RhczTsFk`hQoRX8)$7elzgut7 zTX?SD=x<2LDqTeym`9E!xI6R?Y**f?cTxw+wH#OGltZf-yHUMM?;=iSmP46Yj#bio z^;2F*x(1(XbuBZqAJ7Mgff?t-NneE2ZqN-p-A3kYAFPiuhFYkPF?ai5<&RzX z&AK^iWAo9mNd2Tf$=K%>-9l_nG2^n4^S%ozc;==fXKp%v zn48X*fa3}OLi?ZF@?2}$<<>dzcb@Ib#Akh^=Z-c0?b;5TL0Lo(ra8TpVtPTfw}cB@ z9n^($-i5_G=(n)qDk9bkqQ6uKZ3fM**_8MCy}4Yv_ZNW=cQ@CR%k0l%z4Dk)mOJ^) zp2eQRHc3xenm&Ks_!|(u748kPehv!fR`kehD+%=5#A0p6VY$~PP?NA!1Z#qr%r>Jb z=Yk)T<#VELip|YjmD00yO46Ljor7a7lE#`B9F$FqNz$jt-`jH3+)Z9W>gsN-NLfCm z`FJISNo!0lgyoTK`O->5r_%k=_SA4&<*+IB+^C(?t#u8zzp(!dxz*{vx3%EAp%b1O z^KH!zi~ zsSe|_wJKi&>p1_@E^1crB1Rie$?x^u*;1A6MQASDi}`A)y{9@WeQ(o5n!d{I8T6Iy zh3qlma);H?+K1KT`i|Ou^7XpD%;;F3FvxQ6n!We-2Ir320q8BGo$DU@ zIev}zjn zT%H_0h%E(+fhpmdm-ra@(|&ZAK;3DZ8>zwwZ=bSNcwMp@pzj9<4pd 
z)j#u>t8c0PO(khvNo$D3Vb2xO(iGMlsgAbxI2YGHdd}T%|K~Ifx%yfEx%PWwb=lR_ z%d{jc@AF4mH?4kJ9^~7YG^f)0a=$Oq zu$KF_u$KF_u$KF_u$KG#QOn6a=1-1xPo8}K92ZTQdG0x`V9Ko7=evf}=AAX!HAQfi zYZqvzK#!g|>w@!LpP92K&vYjSIv~(e#Pi(n+2%TWj?oG8&!0TUojrg4(cRs2Xb(3F zdW>5D?dcXmd$~o>-fk)MSai_n%FhONvCgEpk`5 zWp1Tg<;t#eH#}4g3y2Kw| zHf#v|$g)AWk~g{1P0QD6n8~aS6yC!Yp*QA2jZ*YhVK}Td7;ir9O>?}}My)Q|yJFOO zC=RV-)bvWOw`=tPL_bg~JzJaKsMQ4<={IUR$MMF8`sLnF&+=-BInGpN)@8P4wqZXei%w8|do-fLtmt@@+{mnVtA^v%dzw~-h_WIK7Id{Dzd%ZGyzBPN^m_2V# z&q>4Vxo4UeNssI`mK-dd$mi!YH2+ec$=~s7 zdeZcU#TtiF`mUJ2?YqHursLbbt%vZBr8@H+srK>6Xt(>o&vnmHi=TDBch9(|-BWIh zd(u6@h++@lozcZE?tS-1_nv##ZDTz0ZTFV@17nnLx;Iz{?=|ZFEAD0Yl6%p;fL)92 z?n6J{U*H$`3;jI*Q(uOai;o$({FD3CeTMBmRw~2d#SZLU>~w#1U%6dwxBD8q7kk|| zbby?GgSAO)vwEy+V*8>1%NIqyfj{0Kz*uJ^f1rMa6^tgnsc(iIj274dY=s5Hw!WP| z*v7{g9qS>-NKfe{z2#WxBR`bmTNz6?_ikFsmU1iUH1(`l_exsg`X*)mxjjL)aSA1R?2DVrgh+>P?}#@Ir2X2tX<|- z;&sc_x2!4I<~3z&eE=6T;uS9lS}*+)ckuuuXV;PRN&VqJ=Ld@|@riZ(lpsM`%MYzX z+&U}qaZ5)0&%)x&*?zL0WcFVUD2aN<&ErmSm$+-(BkmUuj7#IO@uYZqJS$!pUm0H; zuZ&m4cf|L_55jc%aa?E+mgGI2a?Un^Jw)cXkDL7i(=L{Ex;CXQK;i4r|m0dOy9U-knV)% zFaA7|WKuFEna=ejhy5Gq>_GXWD47I~(G^=dJ{b*nlq_{Fmapgt*5OTR!$Ph#wbgr@;=3JjU$4vau@m@D45>Huesqe44oUok#u z8x^_7-DbDRxA!If5Z}Rf^qo97W8%SDPK*{}{c3?f$@la9{mK3mKY$$n|5b%=`aig; zu-V-z0vjF(CVU#0a4CPp4(1=(q0un@bTnLe1bFZ$(BLur@pA@<@OaSRGeLoW5n=Nb zD`8hgS7FJ1S+tygXdjFo0{2}PJrdcPz#F4S`5$XjRL5#)^b9KmXRUmK_A-i0Tx;t zVWG8&G?Nz6O4`W5(q0Y$KRgta@NjIk9#vyw^>&arEUe0%QZ5xBajT^Y)a)_YWNYYx zlRXJO_7u2S*3KywPM?<-RI=RF68nJH?%HF?&Mb)ji$Nbx}ji>#Tr!Q7mQZTeq`fa1>qn!;B zIDwwo40>S~(%<5XK`sKxTS%X4sa#FCE5JKf5N8m#b_*Vf(dSCMeQA+cn-7=8)>-h6w)ty)MxXJb`hk`fzUaSIRTSv z2i7K_Z(Raz))ka216I}toNEBcRcsV|YP3!OZE`ve#0E~M>nuXe0C&0&bZCJt#OFd? 
zq)YW`T8Nd%mg$Z7xKVEfAOTl%C^G-2)HJc*?wClrfrVnrWS({m<}JJZpj2h>v0Y4}!!$Lyvi^mA<=}dw!Dv zf51*U-(TV2doV*jun!9K!9cO9M?L9=MzHVD!}vSp2>wComDL%8vKqohm`;xd`5E{H z4W*Te{4_s}_GXTs1IBd?Efzgk_S9{ZpNQtMuU2bWYuZ^_Pg+?*)@om^btSYRCF!_( zTUtOzm|@h#5wx@gwA&N;&tWPpZsD-@+TBW~) zR`{1gs4Dnsf3->Uw7_2lEmz(L?)W;iQhyC)wP|RD{~(02|B0*phe7gjkk~&q)$+DU z^q%1QMR5IdaJ|nY`YUL;zYbbSZIY;x_^vl!)LV;hnYn|w+MREJR)SmFo!3Gu{5MAF zirTY22w$!LZmwcES0P0~{;Exi@YVzBg91c{}j0&Lq{Z-bVDZQEUrhgRyX&???j^HUA24w|xB|HCAH zdx+;hP3}Jml23xc+P?v<)PI9k=~!rmPJ&jWLCwzu=xUt^t<*E170g93*YVJD z=BAkIPoR}L4O)S=HP`9Ta-9KPtuvvOdJeQgXG5#?TxhwT2VJf7`*Hmpw8E{U&Xz_) zoaN{c_ei)J?yRm1cMn2K-3I7bXSr!9Jt(q2X(M#sk+hWE`2Vf|OAE&{24z+>SQj1b z*Bre$gkJhE`b_pe?Rdsft*>-=w3xkXbQ$}R^qZFQSD#r}?MBb(8vg2A8QsE+?5*s* Og7zwfTI$9`zx;34o!sdF literal 0 HcmV?d00001 diff --git a/font/Roboto-Regular.ttf b/font/Roboto-Regular.ttf new file mode 100644 index 0000000000000000000000000000000000000000..ddf4bfacb396e97546364ccfeeb9c31dfaea4c25 GIT binary patch literal 168260 zcmbTf2YeJ&+c!LCW_C9{yQ%b)g#>8<(iEkL(v>1zZlrgRDjlU0dJmx&=^$)IKoSrV zsZxU|AR>z5Z9}l20?D3y|Le?7GJ`(v^M0@XnBCdk%v|T{^^C+MNeaV3m13K{+@$G& z#-8btTz;k`$-SGkZPWhzu!d=pT=54<>VBbF`;Lt#PMbAOk|!OIq{t<0+9%arH9dQ$ zB>NA=ReJUr)@#J+`|XBFa>!jtvQO_bc1&#bosRXATxJBm@6dn5fMMev_1q)Lkpm@( z9UahX^a#mM3djA%6E01XNL~&(`)Lu;v*9K>98aP zR2tT6{0K(_#UJNc_{!c!Z zHiyUi0&y-VDU@(;Ue%q|1a+I5&)Nmf$Q>PAJ_;}cl79l;-c zoIdo~XNRV&S8Ya8##8v)MS;?a$X>x!Mto9awqs zs!N0P_4{LC{>GByaS~6fl;iyg!TwH9PyrpCbj%KCrRxO)l{KBlJ3TQ49vlNCWazs>e-87}kwAG)TIKE@$ z&Lf9sj~e&(ELLYvyYnBc$i14gZ1#*yHts)fC%<@Q^VUxyzPJ^A@8ZJkliut1o>tvfy;HCik+H8mvxXkaO6vErLp^B065TOx}dv}4AsZ9Aq--#xEO%VwQBt>`2_ zzk}I#?%+lAN%KyfTQuv+9fRaEgVd}UyZ2-?o4I4hd`Ihky*svO-M{~9MOS9*+Bv`3 zj9okC+uQW()3IfnzI{6U(O4bT7+R-a@jdkq+exXClqe-jbN+=NDgZwf3=t@UlQP5{ z@fCoiwLCN6Gl&fN}^1L;6Nwe)o_s{CG^0hX6%JhxJ zJ0Fj3+~k{9BiODolctYdq zi(foFIrqR6<@)QZMzAjY-8Zwk@!#HHvHbgP1bJ&|nVO;=k^-S~aWS%LAh^Ah;2uS2 zzQ{P2+XcPnN|raUOg=c54`!LUO7MQ3!Y=G*yXaaK`E8aWeE}<9hOU*ZmKqhhu0)7V 
z6iOz-K6}s`>cKwzcJmqYcP#C94u4%mj*)}qL*V-`36>+9mBK)(H#JTU=4IFqa?C2a z*AiH^vCq2e9J+_h-wccdcC~o$MF5G(KU;bEBSre$;clYBy?ByHUsU10k~&?p{s=AB3TS@ zX1hvZhw92MQ+kS}IAwRdtfV@_lIwDw$v)g^5?mHz8qFjy)t*_8C<(NY;rQz9WAxduWd2H z#>m4!lKEKW@>YRVps=s0im zywy2O`TYDnxH}W&FJ{TL-`Uu4)Ux#pK7RCB_H}-pcLjWJ6yH-G1HJ@lk`7-m)*fuE zy(~`3l2Vj{g^rVww969fu5FaqNG*xp^^n*oPq3BegPjmA82{{qQsA}l1aja!Wu2Z1 z1vr{@C8(N=l{m>NxOGzk%}CZ$jjimnoX~`cZZ>=VjLhQki*vjuF8wrV@c0?U67SE8 zb2Hzby=dL?`AS`R_9!OJ9r@mOH$Up3)kyHXbMn8p4~?F;V8%NcGI3!lsL>WY8vwn~ zQeUsdLl8=W*30}=f|ey^%cX1Zz+GkJ|7d>pKzywQi(e7=k!~U2ESbf*9Lnr-=W@M+ zEXqVzkDgN!=#MtEFgoB|si78wEYNk~kNB5y=k7l-3g zOZg}7`!$ASocZaGoB0o2`&~=MPFucl=7c77dPYcf+R!*o6{ojl270nbCX_G zt9ZA4BzG;kr`)hLe{$GXCJQ=v1aK1~q&^P5sE@{xpmC&u9l>_QX^H-kM7~5wRwC)3b|ndXH0mdb<=>ld!u`gnpIrz ziFewlUL)@1=l!y3?UPl@XG~wge;PJt*6msI)RbYnYu7nC?!&L|936YCPVL=858t>^ zw0Yv1tVfF$tL5g589sOJ?FHb1zQx7LBeBxTQa2roA}li28IDDV(>j%K5*Z3_Bt^Un zx3a2L(Ic2JuNM43?vYp%@q{bVDcRhq&>B_h!Xz3Vx6+{A=ALgK=|B8J#*N3^!{4i% z_}yRpe)sj2H%yqgVzE56Nr%aIGM4=`nSaQCOyiyT1lv0G`zND1v^;e8$m*5(#l_NW zSjJ)M%g~2me@V;%EBCiDT7qXp=1mA@xdvTp*TFBJfxYgCUnb%=Un!%RU2+CV#xI3A z6TbwXHJ45(6V;aBvnUgv;ajMB*lH}!776nd$^7I|MVFw(W_nMuNz2$o3bmyywph8T zTn1M;a4$$ddt{=zz_YP4y744SiG36May^PPw12nCQ|5V0;-en;5?e*1IELtq+9SeGA zmoIfBG^sq9EKPL^$^Un&Ch1lUCM`YP=l4ds(?D#P0S8>-(pb8mT=&%(9o`(&e{zoe z?V%5^ZW-1h-xpf188@%PoF2mljT_o+%bD}p`*#m*m&H$%#@d7V^Y&}DRj>n%rJ<6i zuI{z?0cJmvbfrKGt?Nf@8k(fp{6guSpELV8xio5uEb!EIW|ud8f`GSLfu~whw%hb! zs584!=_#=<^saF66VlVdXjRdQ9V$3IOp1$FWrsaXrL$-e1jylGVKC=v7_&#wr|IDo z1=!C8-8gt8HEn*&Ma#lNCmbKtZfe_<@Z}>H*u!}a*FNTF4+I7+VTo5>KlnnG1{ViC z;aTqo1>I(oA3SD#_Z9vg(yq%3!z;5|&o+8%HT&y#{=?3W?SHtqjVUXtH}qcn{_6v5 z7Rx%rGyZzSm*>}Tk4~(6hwWhHSvdRP!PoqCzGP8W{~rGA?~3<{D=Q!jtq9%efGzEy z1q22Wt^%A$6zEJ*>TVluAt9KA$PR4VNhA2Flxy(#Sy)*M5T6nYD{vu6$12K2?}oXj zuXZDwd*9i;`EqJ#Px25Q#dVgRpW-CMsVT%qQnWh(3?w5yhtr&vuHGom z@7(8{f4r0h?Eit4iOw&(BlGZ;)7qvz71*Wk3)v`^w%|NV*~Y!!?OVrxEnN5u|6%C? 
zP@OP+8ki20A`LJ8U-3-13o=0o%m$a9>Znx1qT!9G4#fq9j%9)!R@A^Dtwzr<#N1oxGLbnUSiYJ0kZh=o?NOzGa z{V#m-KgUs8CEW&BN;+`7(&b8W_XDAoV(6t|r8aoUu4qO^6);nLWjPTZSX^B-+AYT+ z0Q2z@85#9fOa8Y<sEeGf;v(VBKC>o+%if*A;M9ATvq&@Iw-49&$|H@w; zsV(-WCi;M(Bo2yOM2w`QG@vJo$D$sN2Kl@h*}_5p_SnVH}`R;HQh* z{cCDkTq~K4%ge)0@mHycs4n1bsFbAtmBlL-E+#>Y2nmj*Nl3r|$u2#ErY8&2mB9SM zE1&2cNO8hAqtjEuaUFXB$?vYMy{69 z>(XFpqBKuhgFrY}^6RcWM}eK)M%uYic$&Sby_3DaeXM=9J=4D3e#q|M9iTb{@<4Cq zmdk5E-kcx2C*;BZmAB>a2%xaGT;QEjbXA8Gae@a~%V%^*|5ZlJl2N-(6%vDFHdxk* z7Ur*qyy@4mzlL`qQrCaMtA#X%@C%}qSa*^bkq;;1!z2<(&7r>ph?m-R{N-exA`yOk34(%U(4lXEO76B7P#bi z!I48(l&d+p7ZiEdHJ-n77klo~pifxiJ-hhv&t#^sNdEI*LkjsF7V0IBfounfNC2u> zZM1+05%$1i2=aLh0tp6sjNnTPRD{8PN`1rXnT#OV5om&LLc+l9GslT>Y*3zD_5lm! zfB(&Qv94>jZe7gR$@RRjUk^Y2^t<&-=T2Xz0Ip%h0X92u7%9aAE-q@WqokD z;IFt0xC~~}6hD#Pby>|XoW)qP>O>aPVRKYL=tBDQpSX<$YT4`wOr60mHg8*kUk~t` zck$T4E6No%hVXlpU+#2a!o#o<9Pj4&pE3LwO*nqSzxLsHCvZ$G8G?LMAI(-qByDU? zPt^bFl^Hn)&8d53PK&M50)>Ehz&BBr^$C+jh_^csu`}HjN{o|_^WFLEo4=U<@)@kt zCGVRoaq+IrS^TE_s`q`H=j&@3=jwVhgXEu9OrEm@6;&p+g>4%JDkMmKH7T)bi3C{; zfl;RN*eMHxV|GX>G+IJAVd)dBab-DCx+(W`v`nESrOckL*N_+()tZz9xzpcwSop2X zpQq*TT)k-HDmLU|AAaxqOb)el;@zw*neyCbm$UZX8FOL6%vDo{cb(LK($?YGpN&5I z&dk-5uf2tJ)d59Tfg%pW8dw%oqMET3i)$dV#>CVxud8^C`>@Q4y@Sxk*3vt`&FGsZ}6?2^L~FD1ed>UkBHx|{LhTgeajUHRC)&F{Wv z^AyEj;!m71lfO~EE=t(2f8Pe>3&4N~K=lF!yY#FkIVft(@tJ{1>rCpT4&!2#Yech^X)ugiio{9}3|O75ZKY zz%4bq{t_%+u>R;4UD3D@uPH9YHEc7rG1 zQKrkaytTaX^0VHv@@@GO!f7ZVJpxGmz?Z@}T8L%w8VpE%!0GoRqnIrBW0P<4fIJ>> zOa4s$qG-7HjvS*brR#UX^(W%`{!&x@`j$%?+-_!dO_f9xhzy3!B+LFbhgc*z0;t=k z#znH{lotzcDV2&ID1WbCzeJtBVIkdd89yrr+NVOkDoaSsQ*zWINS53k76Efg9=05K z{5YS(CfI&>JU+{TmIo$PMLpwLz^=ePQSF^5WXKazsNj&Q9=WH-=6OtBjXyujW{CSD zCxc(JBx*V^ErCKHi+dlA+or<3@MjbG?EHND)JM&;>=|_DM)Kzhd?rXzqD7KQ8NNVc zh?8KKa2p%x248Hv``BJq{T)_qk9vexlCOK8!PV5_K??P3C`N6^5IZwsYS*z*dMK-C zsIp=exl(Ft8JL#n|B)vtZ>Od%}OftEDBq%pGa{d+mEP<^1 zFnGN`sjX3Mttw5{qMxCvsVCa$iS=2YXb567C7B4V25*((m_$^L7A{$!ctLD~Ket5b zVSyq_hYd1?e!{;ne(dyVeftlg?EN4D~im0g?*UvGZ< 
zOy}OTX41m3z*z|THu`H}<;v5V!<-%kYxdI_Ncfw^vJFCrWeYn%%eMIuWwn4HLEs>Z zXG7&LQ)vi@r~G}Qg94Yd*f5uq%~B~oMW=3N}&zdL6Hn|CK?+1wA>c04d^h3tC7 zuP&Wpm%JzD^K0B|`|#3kUSszqQ2alj*ga6JqSQ)rR*C@(y2y%jo&mDq@0fXqoFk+l zQH?^Q2a~$T`At55V~=upEkBhyGfb@>G`hl+m$l*Rd=R zYk+LH_yWrY{F+Un43!ojUeJ1E>GrVZo+0ch@Oq8SlG+j=4B8|ylDUTe73pTLdRzu^;Qg=ZA2e2FoJP+0U z1fB_jhDRm6 zdJoczr~x?Q(2pX&dW+wi^yRdxKY88i`}2BdB#+GCpO452lPmdUM6kHu<2QR3^Pjl) z)lH|`HtupoIrr}JkcDeWTfKl~owG+`Mg6qUC=yAXZ^TMseG+b=h%nDjuaQ{WR2HH< zt0_eU?db_G0E1Dk2#J2I1Qc-)1tKG<+V=gPJ-NFZH4I2feZBYh-z$3-58rppmFYjI z_o&519f9|ryp!@f@Lm>nVYU`uC4smG4LpH9ePjVp$f5zDh>#kw*7NU1_A)k331 z?E*^2lw8pw#h0Y7Oof-FU^FkQzF>Ue*Pr~}xAXAjS@XJ2Wp)4f;L1jJf9)rr z%>pR!uOKTfsihVW7A|Px)MZ2%Ut^7iHz;Hz1gbfN)~Kfh$c_b=H7ZL>j-_yzl8AN@ z_p>IGPO;8P4jVN5^^Am^9OZ*me2OBHLH;oaD^&)J_7_)NQ0 z)MFg$%U|%$0~f6WAR;`4RtU667htxE7kl15`K(F2)Os1~%;E*G zWT_i`j}$-^ihi0VT2O_G#Oq++a38M=1~YJLm_&=wgCAw89FWl?b1hL9A9RvrwDAcn zcAN6m;xCzN!kuNe_=DUX3l?tQwP5Z}IdLPO$1m~V4TTF>-6H=3H@`fieR&hmE#N)X zN&>oa(g-bFx7p#PxgLuoia6B(Rp8Fhz5>NU`wHjCF(_d5LoD=odKo3=!tEj(VR1r!I+Zuv53XMB$scpp&)U|x z%a++2oiy(zEb zZ_4Xfh;B4uYKrKnq?X)Z(Me|(aNx(B!mQx*#1&A}Wo3&rr6g1~Iv<|y#1;JmdgqHG zkL2HPYjbD+;qP*%_3k%nFpJ#V{)e3DXGiAP=8qcm4vT5k{)G->+Ri$BY{e^Yc4_v~ z%MChB=)83Qf424PKCC0H%fI-Z+{xAmUQjPB#N-8ufZD*RXnrtGj0_vOHlm-8B1BUs z8TIa%icoMLsG%o})EZ(|x5&?=M}id+QpqE7u{r0?rM(#YY>Ot7-#&H9)`&k@?Ctg9 zi$R$Yne*h0i_wq3qzqvH7W9P^x(oS_63SZ`)#z#v>dIn%L?|FUgJ2P)KkXS%VlzSH zj>vt1qo!0HdgZ-?Ea&W}O>;a$-ud{Hoab%w*9IlL@HC)_gGtE+H2<10GSDPg&p0Vj z0Fr1*Ey)<6<1^?(K6xP@|6!rhu<*35sjH(VeHCwmq@J2h_!~N(TWDh8bBhERHxqa; zbhsu3itx;)zXXUEz#%e56b6TfC#x+Ba`>rC{+rOcl693OMfr;;7;=Bm-v6recSc*?=JCQ8Uup;Xi9t8 z$Tj_=cb1Y=?B$g!`S12)1aCOt9p!`9=7SgMkuph|D^U2jt|TqS1$e_u@Y=$NtZ2kd zLko2}V0I$nh(gIdIWnGXyd(U)X7Ubvq5_g7RTSs$b^1vvU7w!%x51!hacke8j%#rsN-m|@8 z#1jlt7J=xEO@Q9&ph@v=!6#(%g?DN&Xi2)+QDEj#>V-j)Btj^095DwIfxaQLtrDpc zyFMTygQvpu0TR7iL(iAA?2CMf{q&NY_s^co&dJQP>*`{Qyy{uIwD+;V@) zD#m^DRrIHsM$&|#6Hihp_KK6<(JDL*xlzk9jJy^TK_cymNz!`6uut#+HB6F2!AqTiJ(UAyINl8yk7miJO 
zG(;Q284eZ^6;)R>TPJ{R?P{BiS1xayJ$?Sb5zD79-*DpO#+5Tyz1e^9%%Yy7PkwW9 zFT73S0{}Bl;oST z@|B?tqA(#RiKx|Nw+w0-@evFXRYWxh6H!n}JD{z!-Hh4+{Y|GJ5gLKfJA_IgTnacA zNUgvNi6mi!o<@$H{)fkmoG|^59DjM1@)=*sZ2TyDnIFyPAF&4b=ip0kC}rhU-r7^P zP3Ff~#jhnH++dnWh zXXpGyo1dM-Vs?$J=e_fKtG2DuX0Zx2T6dVw_J7#1PDbCIXP$j-@HrO^igNe83= zX8=A35z~*^E)xS&XjFQtl^4}JPnt73wsbPhQw#E3dg?PXWUDD(W01<%Jzgau45I~M zXgaIxruIuz=3~+H;Ol}=d%U+{{fEcbZrZ!7N4GbI4t?W4-MtuJ3TKU2*rpBqm(82_ zy^W)fuvTm;YkA}VKY02SKX^#)xO(%|LvMPnZe7`@etYncBb#$RrqE||Y zrRBjv_E)Bko4#Z3(8*2OY~DL})|zsBYxOP_MzrrL=f@{>nml0m_>?(m$w33AFP_a$ z_G&k&YWYR1Ve%Ui`lS0ytCYUV`%(g1_Jm6gG~&Np%%Sz(VdIozN-X+<%8SY!gHFOc znI+%^ghDAP$8x=sl!j~^^V1TOFa4T?&cbf#V8-OSrQB#EMJ(E$$z6+%bSI=FCL|`( zhzyc3?$@7YywPCIO`BQ7`t|&tU`>{{kVUNCHFY9$Ee%neqdn`IcWK>sp8WY!+;@h! za~F%>yNAUQcmB!uDeY!Vne<}aHT63sI4kG4da6_9#%V23if7UyTa;4EwhdlaS&gaW zF^EAkxB$lNGpI#H#aiB;@+MoHHP?E(?fd*k#JPFYi zJ#pkAid0lY)by2u2QFVea8PD(TFaJc>8)C+c>~w29W*#IGpgBh^;)$V+7fr}g{b0B z^$*-R6#e&NHV>X#Neqq*1Dw`>%<54LZf+^Dg^L-~pw z{2exJ2Ya#TL**r<(<@D8~q?Kn;`}4ckV9%5m}@?=DtjSfdwOHCw-f z`K=k!!NV5IYlpIO{hQRO|H^ZtR=o4(z#(mx0>TFJ5_t_EOpq36v8D`-1wt_h1_(8& ztjOa_Nr#3@??{U!rMuP;!(fL((SepkXJQ}>5IagC)&fHG=`l=%nPeI1RYqKnW1NK{7Q3BVqm>S~hRk^to2+-<>>nUDL)ZcW2DpzM;)a zO>6YS?;~yvliF#)Pxs&$(SZoxjT4bh zF*1S%E1Cy4v_MC&PE=P^lrN=1705(r1lFDn7;~mU?hgO%yO*~^(%L)c-E~7m1A)DlWlE}b=uQSaE4^2>US9Fme$qZ)c?aNmjYTJ`|=up>TTrXD2``dIKmysefF zc$RWv$$%#;kplys?7{jQtWOxky6baO--4!@C~Hb0bX*YX(~UJn&vnDcc0Of$w1D!W z!jCb0r^zHk=|z{G3PcjK1C>ut%sVC?U9w$%2Xl*mpOe<5e#bpAj@i!}^d+;jhZ?DN&%)w46l}i7{=r3KL% z9y6@(lpOia2Pdy>8rIl1VI=Py{La|?K2?T|9@%a4g^%BVZ~w^F%UFFl$2Du92q_o; z4rF%*$Av;K_$F$NAV@H|h2xD(pN2L(Vs+P3Ea1xUc9g)UOiwst z>F7~q;1t#sbM=SEVE~}TIDVM59LEpxgE(u;+Dziv;=nzVSUbKSDhz$i?_#>>9x_g` z$ea$;)N0k~vMPDSbWHHcmSyy;1e@iYB30@ZFBC?W7kw(`+B~{KE7O(CBg(KjA^<>p zO?rZFb|yMK*%1|Pi-@L*2YPu^5*ZY;(Gb07Mz2Lnj!{SSwG{&vZk#I@)#xp!^xuxg zXeIJl?-$)BlypbGw)XoxHn2VQM^D*Se1zZZ^KhY(F&yo?!G~rPEp9{&yfT{q(EA7O z35LG_3D7IpK&GKf1os$v%kX2-%Pvv@=-P7X@6fz!o*PGpp{vy_|D7_rR&Ct&Vm&f2iHTgz9zXqz)O`^25&a2X?usb}sn& 
z{f$%3H%acXB;%EhT8#>8V{5$eT1wC5^V)U2+~JKO{0s14>*9O%$*5da!?a+1>6|9( z5eA%sTA12&dY<#~prx~|BJ^2B!`@qDy(HTvS0q{2f^4FjEeI_>L6?KzZJ>L^S-Ms& zJV-R0l+%A*PrP{Q;n(#p*F(G!SNcIcCK5cA<16w@YKdD7|wCX^s25FyqB<7VbFu?U!G@IdIT|!@nOH?Wx;v z-=I%^@K$x~Te)IFQlkw;{>?Ykz5CXJ!AjfFD_wHA*%1diz46|v_4_&wne=A6@Wlt) zw{O##7ymfgbNrQBdE`A#vR?}VseN)xpJ3DIBByK_G zqN)$?!X-60t)xs6T9(rEG{5N*@60VYlozwG6GLm1sCJ8zA=Vz9ATog9sOa=)1>5>i zNUYlmCFSv3H)hYdHDSc%Y41*`z3^s>yqO<7_hA2rEe6VQ^Z&DS%Z{m2R@)-^BR-(} z2Jez-U(a6t z9D27tR*1+1M;F#9TQ>3_t_v#hhU_Kp;1`J?j65+j&Pmh6CgRhcWTX| za>{?bn{-Fb=dN`*%<2h`twDn#F1GoA>qgn0iRd#pEc(|H(D9{;2!V7klq!yHA2lrf z21d_=xieFXbCXtvIi_4VG_NTau9Yn>W^J)KL@b#N(TN~bF9xE>|0Rtat}9`?PY0)^ zcAIo(@tbe7nB4!we;0cFsYEl@iKvV4$k!Yd8!uLQ6N0gYmFcFVpX6w)k_QKHnCQ;L%K1#|d zCr2hDiEebcse6y=EtJ$viEX|7a*h@aHM%L)D}_m-k1~Y1Dw%CnR#wq2qoq=YK9FoQ z?Hi8u4%3Z};5Wl8idctM7oiVuN5Cvb2=*c$Qg{NUj#UqeG)NlTM0v(xT044|1L((8 z;6QOp)Zu;Ge86Z@0ba}wQX0S}&z_y{b?4(Kf0|)kU2f^aO{nLFlw2DZ+fQd;_np`<8I7IBE5Eeo{1bK3l z4-u`Tsi}?E~ntcW5iym%09JW6ABl++7Q)d-@3JH*N%E|#ggnpS7pm5Tf< zQ*Z&{jRRE@*nGZa@@}OmO_$T8dEtVQ z{f7;G?<4s{WF`yU!&3J$*Qy8%oUiv5l@C!Dg?@LLpSk)oG)S-FdzfEsjTos0vf!&V zd#Wg<*eO1OFnMbGFk(>_mR1v^y;+zA;k%OJbOZ?3vyOQ2)JZZ&59FqrMlZDp{kP@x z-&Piuy_!jl)-18-QNp`KWocrgTiwzr`nSF~t%Gor3?xxN2=4?@G_Q{NrL*~kfoA}(f`t~2qe;%{@)X=wQ zj_BKGB&*H+Ke%!I(xK0P9CY zS#+XDx;8P-mghS}S55vv-M8yl{R@hIGe zqWRhq4+=9>qBGJ`#VkMx1ssvda?kTS*VL~YQt71^o9)>n@8A4s3G9zc`$F2*+tZ;xsz@DCR1@_!c(U<60tvs#FkK}^A~aZd zukZxWAP$emLLZ$|-oyV|iIQ00-e1@D?7o9P z?!}H>{!k27A3v|pRqtdCF8BR}y|{O+W5!JWe*L|Fsi0SsFr!h;`5&{cqkC=4{)j!i z+QKyN`dQ%I<)2&$^1gkB7exWr=CN1k5A;;pLe(XhEa{~=#LSm25C3fTG~~hXNQIUy z$pb|C3EW3gkpT_-;>6n14%i87;Y^#_EF&ApskYGNn>=c1v*pV#S5%iASgsZwF?U_g zkloFPk_;cfWJEt$&tPK@2BCNi_yli2M9qo^_b#>7kUQ3Ich>VMBxcPqQRik*$^t20-w{%eGKKVbLnAm*fNFI2yk|F#w5+Srj4MSM~3 zJ`l=c7_Kd;Vw(f7uOIEem7W}lO_5WRS$^gwKC*DVt>f+hexHQ}AcOC#!=gGe0=f49 zn%2yg6>N5mdrVW$%QtM-VcQZlf1ho`j%%R`e0=}X(wiO&K<05PQD^Yg)8rf5_`~h1 zUTM*^jqUn`m2E9bkfPv1oeQN zXm5-9QG`@YQzAuK6aGEz`K^d;t{q8QL$q9y)33KHiGWK~`zUW=6G<3R4wMrocl*zz 
zNrxx#gD=&o{qjq7>Nd7b?fll*y%Q&PN_x3*?JQYo4WhO;SHs8rXh-MQJ3KBdB;F)Gx*lX+10m!3!ERz|WzjHzXG_!gLD560MWN z=#3O9xk@r+HkAgG{`1TWy{cDurrzWU-QCajOpdAkobA@o*%1wb8`g0QSrAb#?B$xU z0&l1VN)7NB?G=apK&TlKq07G%G|ArD3c$)Gks$%<09QMVYA3eDb<5o^^FMYCJ9RVD zR?M%kBz}c#&D(qk`>gn&sOm#bl%z(1lHycimD)-p#nzodHvgnX{5tKM z37hbceaAg$q%Yb?;=%<)Z@6IVrYu9#Hsr!4=UOk&N?fym+ zH%=?pO_5m94)rE)4hdDLvq^+(WwAgABncuGY#CAJ%`u|WLLm!Krv|U^r)buDkw>l+Sp~C z%e(lcJFGbKuS@D(7Qp{v0a(YgdUEuw>aWTS487A#U?kO*AQyscIyFpW z@Ss)6Gy+JTVIVONvRl9+E?WX!N#`27bF|+ao~Oeqr|Ylw4F0H!wS^5j)K|}j4jm7A z+G!0!e`X_(Q5#Xa4H1>F*1|Lz{zge^1+J0Fl?6PacT%nGZJe*XBev=AketLIQ#Be_ zqbDHL)~_c_;nUYMXFW7{Ksu+O!=y?alV|UiUwX2a*_BuL0NV3zy^7se6=?wcy(fq< z6yVVDmqr~>g`tCL8dbo_P2d$V6NjMxhE?<`Ak>-4m=YQMc zh7w@D#<`L$Zmh0ux{~KDlx?iuV*V(*WRsiy%x|fz?;>>N2-V4!XHEZ%f3&+~kDHzR z)a5{9A0cCp8)$Z5RRLD*|L7>9jF*^Tpu`ECl=xbb*hL70qKOUcScS(3T$01~%HfyQ zxrNx`i@F>X;srHM(8~ec_L@#HfwO;5%tU@-S|N;Dk_~3owC4k&&LaqP3f=szHQ#MWH4+T@&SiZMz zp4!IXN+vbIDrxp0NNVseD>Tv~78bzrtV@BeBV=M3sn{(PFHHWOzodi~F?NT?D3`pI z*%A2?vT=*$mU6Qt8@%XqR%pLn+ZfzA5`LmvdQ%I~c@~}WWs%-1aDwLt30>kqdC}t7QW01(G(_ZSxNk_Zvs42j| zPD@i7Z)R-C;^M6z74oxF#?1fVBk#G7v;%p{u6*slarJLy-jj73p3GJE?^jvUuPg4i zzznoE{_t5;!qsyJ51vzt{#MVENANmUN}Nr1K*?jX{oyGR*7_!h6Qr97+f)9mm6dh*@KU-^v+Th{ky$yq-CiE&f>@hx}NSn1hHBa}YGF5Du@C;I~9Z_n0{A=tpA?dRalyeFN?_jMK!(*&St15|oTdO8n3dr^T0F| z(l9dy( zUS*q?>C(E%-n0&>9c#Yax=hX0)26dVne3%3K)#gs64jY7%$^0Ax=RJm8C0<(Rs_2n z)fthGC9BDtg8jghrlv7)zposFei~g;Aqme0jz4>BAIlj!^*__&QGm%&9zfa@u>&n-wy8gh{m7H%_iHKV$X+xr+CTWlUWt%TxJr{vLaUrCen7 zS!;fjU#yY-?Qg$*dpYsDC%=9Rx|}F}D7OMGg8ns=W;iQmkDheD(DIZ`aJksz^hUK4 zS<@Deq0+B6Y!tLAoFyo+#I03|AE?hG-YX})ra6rasII;Zk3i^h;W&_wix|nwoksVU zpa#^osmu)^P<><2$9hsDAyI)VObsrSHM8{|AIJ7Y)O07ytDBP2rsAL6I>C{$kSM;Z9`}x^g@}eNX+>eh_c7Y>mqF+s^l?3UKJkdJL z)nQSqg9*%zspeNpbn^LGI@GjE`lppFHAJn7zuuory?2ndI8p^9b!t?!=mtlR# zO1_+LBr94OHM7^kP3+ZKnTO6SVWE>_+YD?zKM&0_srRZOYfuBQrfppcv^u0i^51Fy=jYUlu*)IWWN!yga z$WNFndr#SYVxX|-XtDhmV1tcUe72ovBe%W$Fc8~4pBR-p^5V?)d*);=o%PldwKe}Q 
zZ~QC&VY2s;a(BbMsYPd(pEz;x>l@e#mN;jgatBbyW3L`b^!k>xu2=vzwtoRYNNW&S zCZ6|{w>ZUu%?;ZT>9iT@nHU9weB@@PrOEX_{C@xJ;WO8=MzedjmHV{pom8i3r+bga zT~}LwcHqq!U%Vg7i~1x~?Af;Ajs_jmUT9jqdUy(BSF2?e&h>c(lfV%!S1y_YTk&+TB}KL@-{;Mu$f zgy2)dk{F7MMz+mxVnW8;l3_3{f$A#BkS0=xkMcQRIH-D^YOf5Q@)qOUlniC7chIbI z(^Hl&lb2K7bur-h3vke$r6DGZW+Aq~mjRR!Y?z%6+}Y(Mr!qlFj&eCADk8gBi;t)6 zwv9b8k{93n=&X#{hzb1ilSALLxZn7X{4vk}`nrtgUdd8t9&dXEFq8$?y`hEb9p*^A zmV@0YqiZb@Ya0+)Xjxh;FQ6*8+1rOZ2Li{I*1b`gt&AWu4B8gG=FxiBDwGx`4BX*x z7N}kkDG$Z-i+-N=PQT3o2e;1~IsMLbew!EOvdP zVbGL?k5>M{uSfD^xqsB{t-Ef#Msn1HSGBz))`YHjUpgGH>6d?#!3i|4UA(2h%{XYJ1NpsD(pF7oA}XKl$rm^DdvT_^7bt-Y^}?Dr~San z-vj!+ydaW4$38B{(lA2#Umo(&-LeW2ZDK!rds#s4mbz)>MJ_`Nu`Nlj{1^Or>RDWpIvA5KF@;1}7~?JpoMWgXf`kvweKYKKs@K&&gh~ce(=`1-8OIo9(UMs28REXl4x#Fm|*g-ga?G+9Yo&jWd zDAYP6SH4qyNayA$m4g$TR_51_^BajTB?ebcY1U;(HO0;f`*bP4%CC)gocFZ+f;^{< zUuK04-AU$KqOM$C=$!;aIDUHnDl(*%d~~twPH50YFj$FMM+(%W6g5AWpc%viQ`Be& zh@v3K?1XAD0b+OX%B0iXQIX`4im>06k`AkmsoOYG3*bfCHAe)=_VO8xj_&!befwW` zf7ob@?F#2=%c3K#)Sg`ijg*hbBL{ctschbRia+2NA3R{SS;TQ|wfC>xXU^_A|Lu+~ z)Cad^$2X9vYQ=xrvPI^pFFK(0y-i3JSO`&~?V-lZ3sa*-iVej{=zUY>k|^aY~-S@OGEGUw&iJBHh0|Ma6+^r|}?_TgHP_7PCMP zJxC?5?2c7Amt@*y-tsh+`5&{?9eA3`-VOY>pVrIz<5a+#bx>-4UQjDe8mIZ|87hCu zhnh5@vHB8Ug78ur;OW(JDur2T27_d3)Pg2AZ};YbdswbOcRE~gQM7Zu15Ij*EZb4Q zPH!NmhtrgZaGOx;8FZW3Ilt|_%B6ClUH2|&ShaiKl)y^LIM!pqmi6=SyodA3ujfzy zq1wW{$6>^7&6U^7jv+t&A%Enp>CM|PbLu*oWD#oLk9LU&gQq%6W4fmb8)IbTEWIA0 z++r-g#H*&o8wLwIR*J@6RNz$c;9{z)0}ZBW7h+xWW^qVgnfm$!1EY_(1OZ@Pq=k%u zm{IbjJT~|nh8@wr@?Q1U&CgdBu^x*yWzAEbL$lrn<(m(W|ES9AynTTI=KXWg#4!sL zvTO~I|NRu}jFfsY3cWuw(1F;=U7;jtk=9j!CyOcG%nzw;2cOJf4Ee524Qj3x)X<>g2#9P$) zzp6)beCMI(ora6fXgpa3n!u9}9P&o_ye_INzu3Z`wB@VW0OEx$upgwUs1gWY3`@W| z;fpCg-nU48iN-?6YetV8C^Q!4B+RLCXfG2B2qcw~xP-iFoVPI>e3wbs#@hRd@(#{= zEZ(?!ArSS7a`)t^pHxuQ>HRWm>ZC=2d+YKwn1iIJD?}o%AErYLL83iniSeFRSEhO) zRpqe%j5#5$M}N8z!Kz%P`V{~Jb1qbEktxTv;mL6%ns(WC=6K=Hd2HMp!$V?~0mllD z$ftRDWbhEami6OnWMwex_nAEW$uH_#yh9-;ty&(_h^c}P=jaMW;L#whrPIw)jVOVf 
z)?^`iNtzSR2&|tIX+I~_>SY|vgh8aH`5CjBKoHt$eb0BJu5veW4@kdK3%%Z6uI^ly zw~hDxmHotD_?FGsmbZb;_y(=!KRuAMyaVYUp48#-X5i`U^sik}F-aLcGh#4oMpfx8 zO%eW)c4pKQJ+i#B!7XcTzFoJYT6Oi0+6K;TOz(t&SoM&P_3JxlFBd}A@#33 z?_XwWv1OO z;iI6)hU*Z`qV(-+9Bw>ro}M=2#FO8WvD=nDza}J2SaY{BK4u$puFB#Mx4LsH?BEYp ztzxbn6>_f~{o>~Fa=8_bU%!6BR*7ZtKeuh?zps){p3GuFtThYDy2RIhfAP|H%7CKP zKc74M6XAS6f&zNFNg#FwH}=@DaDl~o82+@yVAx9y2D&<2ar?<&tPXpx@Vd`n{D#e9 zu&D$djUlOLaj!7!V){Qm^F-Xjps&G#)R-cSOOjau18d+m5i`*imgI$}yVSG!gZ94p zSQyTCkDVfJle<-lzVQ{i%Ijv$PQw$n8I+7<2Xwm4Bn@dOPA_UCc-d*0*EeJBui6E~!L^UaRIcpHjIe(Ik2|8aXG{QBqZsbdSnPO=3K zK@FDy%kr>okMXn@VZsTV?|A^jqtalUO z*GxKqtmOa6l+#l*#Dkv5T?Nu~7u6|uW3NA8D(ByLukrpk>#=C#>IJah`@TDU>Sx7P z#=FxnmiDb$jHR$67P692p#>Ty5tT?%Bj5(h zf-rPyExnYuBG?Sg@HENo9980sT+P!x5v6lpp7O>&d=W2g@d3=g>+_)WCu#+YDI-rX zbpZW~u`gA2|L;)t`6q<`gpRm$IV|%-5zQ^rf=tnzNah$wG$S%(UHHof<;jOW?aznq)7qilXOEAs=M$+dV9_wKyU@04ek z4lHFMzi#-2MXcNR9aDDj^B*t$m|xgd_&w3(17sX-V)Zm(uvNnYNr)@r$Ys~*V!?vN z2@~ql;44F2YM}ulU4ohB9-%-(F%AdXg!TwU-E48_M!aZAp;R}cFYylE7*5SaXhOvQ z)xZKdXRsA%`r~JxdI+5TCJiiX=Z{zVUCGNUP?oTOe}59(CRXhX)j7R=FR}E0eH@&O z-6bRyQIpUbeKe=8HJnbUAst5+MK1KKftHeTqANg@Xt8MqEA`5-)1cUa0tp#Y^oxEd zXbU&1>=L`&P%;c3#M_m3@s#MR7ujq4zs&UqyIl0kw&koGf3R+wobLTt9y**=D)|0M zTjlZ0O-ydE0<^`VWs}1--LIPM)`ITiNCNGd69WJ8#owrHDWH%C-8pS#QSNR-d|C~EJn;GPNzrXkMM>E@ZZ#nnW=bU47F0o)Oj2+UVnB0^oIANkLMxmqVx~M%- zpwOZy&}B#z4sc3TLwY_VDl3YQH2XLIa~ob0?drW_W%y5rocLrwLSky1D>-2e+j8}G z*UstVuD>S=Sk2L+ei5HQF8u9P>*XwIH6bo)R*yH=vg;zhQ=5&;SPeUP)k;9qUch{< zm`}rN?pLKBkNH$y5JCBTx3ZzIC%yvo@uYZ1T`E^EoNPoL=?ndk8ac^FG!zl*&k zLvz~BXNZ^=_7K%%70*xjJ#_y)in&KX5~>(&gzXKJ$S}qxS(EX=;wJU43dz6!!#+Gt z_F)lS3`=o@WwQU9rKtRr?a3CGeq__d#xGb@mS-v}`-RxRrvJ!36;Aua>nVHQS-B?$E4PE6UClGrd2q;0voROH7$VY09MB+PUNRQ^KNV%zizDSPrFX)TkdL$P;jx=4!fo~KyL#;m; zkNno?e(BQ>-N`%lap#wges|*VpNAF<8k{|Bl;_-0rSywk`Zry$Z&OZ-iIo~1dGqaq ztJ{u9Z_};qYCFvueLPf#-3`ze3O7=q>W7!p8^r&y11>DeG!2K8k=9(XYj z$xaQ?m)Ypi9D>fw`_={Sp?=Lp)T$XzV7uvF3VkFaFe?yZ;&Iq!X)dWYj|f4vqTfC2 zLs1j4x@znbGwoY3)W*mkKiL0-p;nnk1S7}a;PU7d2$@0k^PNDW7jJ;^?S9h67n+=v 
zkO6MlybtVJM$FyfO^;Yjk@CXs%3I4Jd;5xB_CY|dMMHC}VS7z;K2?)g4`cv*2Dny( z6nR|FGs{j$_3}|5m>i`)f(;I5@?=r$+N5*1s}#6nsLByMxe}!c83PAb=}-gw0WQVU z5{Z53t>+RYyh&!Z_q}|uVg8uD~veY6;@Jxbds_E>3i0+bXc=ze3*sGQ9Bj&=cB$Bc+wl(9h&d+O>ZnXA7Ua--I@(OCEgVfrW`12j9#WL2+{GP?L)N3!T_}51W_& z;D|AGWs}iE;|+1#F$}*QVtdiAuvk|5KmYuH@-GBF&aKc&A3|>FEf2tI^bIgJ0Y48- zDh9myIPU&ezk;z2#?=3R`4x19k}L(oE{|akSlL6L-pCiV#c|vZ8#pqfFPO|ceq_VO zQwpj#h(SYobRETYz1g0H@s@z*OkM?t?p1Ke+-h8n7?&KXF>Z^BWtix4&kd2N*@6tO zf*A_{uY${BCZMVU=?~at^4280cUzVY^ky`=n6$ARb;U0Tx@JGx(?#kSKzquFoAGflU7|fOhFINss z?bKsOKXLKzSOCht*xG;Ip$)l9@<8!x;5Vp&S%zbt>$M>1Hz9wHfh?1bCWCS;9M6vk zC2mn19SxO9GRXftZo7zrw)@uE_Si_yB3qGsqOiqm4e|Veo;E7xtBf?06aoFsFk6@( zmKrB4p4=ujKmsL9J(+|WrPIXu&}tw&HG&16|Cj}rWGDu3N&M{+UXO?6Z)MS&x6MaM zfQ+laEqwKDJt_te`k8>y>AkY=vzuq~Zc-01L>ZK`phUtN_tC=jT8O~Y7?fz?N){c> zufLYo{l29wT}d>jBDpjaI8$KQ(AW}~tOZv`@w*7l=8GSS-eazT88`E94(-B{#NPuZ z(!pVy(LnEH(z?OR_A{}sZEwZ~^aC#Dd(_pT9*h-juWLa*Tx0BGEI$jDNs27UY}t21 zOF{DuErc#HWvMZ%J0=CmGiJ7~@v^cW1q8X7D`1n%utIoYbyy+fcU+i}&kt`wG3py8 z25NJ~^FHD$+0$`H?lZMR60(~Q%B0SYZ@uMVF{(!h^mi=0;Y<2g;>M4pHjk<&cMqy{ zLSo`{{v%K4I?L&_pyv$5*>W@$c{H_h`k^a_blh^W<@m^b$ID$TNAy~5PdS{>i{)GcIip+_-mD!j2j5?~OLpIV;Y0XTeuMdw0>_y!MxT~Kk~rE5naz+oov9r`T!2DU=`9CIg)`$XFDs)*;YQ;t*7T(b5HB`L97gTl`dUgx&E%2^zidZbLUJ}6CQp( zW%isYYDHST*U)QXH|7(ASvXAfk1Quz%3OosEtyl6Sr`Xjb418ln2&X|e-;E4)U5^S z+BN1-C)B?C{M%=`^!#w^3Fcwl+NWpa_v_xJA6z`%WcQh6%ieYK8{UNeW5y5Q*SyIC z#*gWbLe4f`bOZEU=!itTKALJcNvtMtMsCH&o8%V!%V!-LEZGs<>t(5foKRN4> z9qtDB89_Ufx1AI)(~*^=44&jd>uIBKqMsY_oE^&Kl)hVX*>P>V6f`_&n3)AsTw3_#&oK+PJRWJzm_Y~KSk`0%To zXn+QnYPTOEOjtYI`wB$>nQaAX5p96vtzA#EwVbTQ->-Gqe1hCnK>3)w@#CW=34AqX+;O9^R6Z_WtG!pj6+ z2ndni1GZ)k=|X;)Y!!<2nK-x>rT;c!KN53^MI^MZ-ZWkp%Y>7aQky61E7<;NJ`^NdE~9*r`FKElX~FUZkOPf10X5iRkfHjzGH1t;wYjHx&`z$N_O4?~ z&$0ueCH+Z|L08@a;|jsJ5;4M(@IIKwW$fPn%eYY60U9I5W%7>FxI!L3u4E_wd5mZB zxT7q89XonVlw~Q?%9LSM#1;CJdhSV9ze^X4?i{54Us$y;XgO2#Rg(iUR?ULmd@SFS zr_ZoYtYR~QOVW`b7{a}np>p6eFrb0ykCbmBhC-_fxQJX~L_x^*h*#KL_Bu5&?;$5DygeaG-n&w5ZZF`+rT0CP))YcCxYXm?^YF6XkAAxCE!?Ieo8A 
z@(Hj;d^^S}i>nX_ulx241-cv!v1b*4LK?5d=m=wY_kw-AU$OvW11+N8aOcQvGGZer zwN{=cgql-kd^o~Wmq6ew@WQK_?nhNlHpiAcSf%h23!r+#F_yt&CS2m%Doh zXw}IpXGWY1n!Pq#J)zwBv#J=cYTk7&7VSN(RQ>p>$Y$dgXY&Ma4j&siX@Qu`re6J+ z&+<-W-;)jwgpi$bGs{5-AETAmb#TOH!+mqLIIoM-%Aj2s5Dp7{YURTv&cD3WO7T6; z0t+9DBC0g|Q4yP@o}ic!GGlbdnpxd=98Kmc!MpSyUkCtwjv!Ou8WwU?iJ(xdmnis_;u_(kC0o=#_t{E9SR)5 zWIn??(ZBtP-W7aI6m7p!6&uf~rn0j>_B|e6^IR=P$6J8L6Mg$`agthsC{l+rmcp_~ z7LSTys%s@mO4k8exR`t)Zd6@D5OiEtkA!$EjR~t)00#-1jZ=&&c>J?9 zuZs^^H6$UtHY$6L_~(mS3$kNdPF%2gW35^1#IY5#Si{3P>&3_iYt*X4r{!MN2E6q| zmEGB=zEy?|Y7#OfZCjs-(-~Vffd$xemCe3Vdc-ka#2Srt)R1emPJ2>cBMd$kYlM72 z^BNfvz)u+eS|geAQyGBh$`tCVe6cclFe>kS4 zCGffSe8rA=Eyh)9vS-;Iec9@4>y2gOHJ)s~QOQ**7|T{%dnyzXGZtOLRGrg;Di^)ejFGI3G}WC*UK#{aEUYNWaPvR>M?X5ExMFcccP(j zM_-I4N{QYRP0DpNDc8}YTt_#g=PyRz!t)lvW6fcqB{A6~h;m6hy5BRKW{2$+S6lY) zNJ^p#t%ge$^;wnj-gQB5F}^|En6fd1zgl{eEYxavWm6wMzv@svpRj*v4&dkL8xH;S zbNjoP^9vd`#ml8+HFjD$w2TM-2{VT*H3Nxhs*VD7fEqYZ1EQSJ2%smY^5^0cSU~Em z0Z+0*9l}|_#%8~!G|U;#b~fnnZ~_D%MuOJiYDpkELTMx>47%iJ#%fzUPewMe z#_Y1fH_op~g^?o(Lzq*qz#_-Ou1A$!(|Xqn2@ydRVjH-`l?7t@QP!YuUmp8MnPmYr zo+#W0sl(y_9Hl;R)Pe??jA|YB%2kM2!kT>SIgq{<;<3Ovz_;%zusHLeLLnE;Bsg@- z(q+@jRw-#No9q&8L&pf73?0M4Wfdj(aBG)NQy&QNwdY&$J7dAOJzp{9_=*LdrJLSb z;#rh~`hTB`HxgdULU(7D(2G@KV`ImTPZW#AHRl&BFrjzfSn^SPkMW&I(ab$SF=na@03_6I!M?%Zcb}>J*@Fcef8e+;> zNerf(DNh4cP|iM0QC3<>OYQct$CH2U^8=oJ*Lbr&V@LP%q>miY$HS8^v#J#{GvdV6 z&s|r=)e1v~#&ZyQI$qn`T;cM3pXKJ--xidXi)vHJQj38Io$?Q>mGBf%P ztky33P^~f}rezJU-2C`p(Wr^Crdxgcp5H$8p85E` zYJn|U(yBw9Y=BCkE_ZX^s!R3LIJ*YpAk;2a9SIXy^}tdR7YsP7$%8U zrjlH5s3G`*ItA`JDefl<+)t$BRX45i6E1gZfjc!NufFNYIxhEf1@7lkFfMm<^V%EE zMeEXIVPyty8U(>I+|Pi%X+M|XJeJS?;KOFeqLw4-|4sV8cb z896O0qe{zz!$jl8%Gz%A)#tCjBW|7i?9Em!3l6iIC$Hzuo-A%onlpaDPrnQpGkXe) zpFEqL&5C=uWCpE!>2~GCtTqh?%5~?u{}s`$IQTneXigogidb&4Z@n#y+TwbRgNYDl z(7)mGASZ&egiN?Z*vaJJ13RF^z2pLSathirk)Bvlb|=znT~#Jc9Pl|%v6Y1VH0!^U zm==$22{`hPch(j*QK~bsf7^d|+I~M|$doC>y`<+B;vxq2((9T-x0m2ZNbt?y5`4Ef zZDnZzgAxs=E#?pZKT37WLk%CN*)a&l4Q?*yiHv`DQc7N&X$fGY!E#FQFTsEG@G{>5 
z{0C2O;Zmi#BKB_oZysM(a>$Tr(?~{+5i`^y@RF8A<&QE(rE*>EmwRe#u-~f$K8S)e z*j)3>;M+CjAYl_>$5VL{!iXEbPAP*@mGI+N#l3~hw*DU$$4~P88`ghtdd*}pgAFau zIu+f`V{z-my)V}85``b%Jue=r7-L_NEhGE?X^h4u{GVgA#=tN}z1Rz3D-#H+B$3il zseGd+@8fY-=I#A$&!T=aRxi&U2B$)13`@F}u;TvQFSqrZ|JnZ7ZP#TM?`Y^4i|x-s z`i0rt!TQ1(YAn{l?o3n?!V>G)zfZ6hDt| z#lnz$0Eo*;LBg8Paxpd|Yud=FPh`v)+hFM6lP@?Th7PY3oLM@h9-msSeJJV$_qRui z4vtrVl`bXg5!-=iBccWmjBI;uJez--BuwtiP=dQ@io1P^yH^T{O;R}w zk7Hh-shnO@Ql#8XU3o8>o`ipwKxcja|8J&!}$OWLQsTzLab&qD>M>&k0b{0s&w zd#3s52MN5oCzjcK?;pM4@#{jR!P5$!DM9qRC(yV{!Ikj0cCQcaE6p* z6pChb>=B7LLuqzaCo#&-oc82IC0Risf~YX3B2r3D?A5GZDO`AkAl6!Jc{nCW>}6e* z)tohYUR*EylZz8gSyHvoWsT1$y+W5YIn^K-wcL8E8-tPGv0j9hnwT`Qh{ zuW(`Lil*=JZ#Zk#RD4qSH5Z3pVAZHcZk||W-|H+3se#BDX14)FUYanc&821)9VK2s zQ}8?6f^ML6G(NRjtWx*GHcGPnrhm$|q38~MN_p*(PZ3X(pYq4%M#$LQxW~liq#9(b zq13RA2Y#^x726V_D*k|1ms=vmF0_hv$${cUce5*~{dfJXyHW2+l$7ZUf(> z{K#NxdY~toO#Cp~_z3K4bRG7o={^LS^=G*}*>acQ+ zyJfH8-qRH(z&hZz`KY6o0E<2hG(Ao$uUChH-`D8AYQeKulm{tJ4altl3(&aCA=Uz2 z6zkW5U?IPVxR@|7`qxQ?J0}Q3D2~lU}e9`;*|b;SAUIck}ka0xX8S zA?wJ^ZGzHbkO}B$MZy16H9_$rcKH4`U}`n7kA*Z#@xzrZUJ$=9 zhwH*by7*$>*D6g!U_QI&(Gl0I0gXCO+)^ils;F8-37IeEPdT=jYknu@Bb781y?!(# z5z?qlmOmM!E=#lm^Fk3&6z%cVw4o?WJXLoG(uFnn>l^;YV)p)r`(>?nks>aN-_Z5* z_R@DRT=>}A8zZFZo!=_Q;2Vgfs(})@W&?sj@(qigX*k?rADR~e9WrFf2*wI!%p6L^ zSWUW_Trg;1uLeSW);1@9$(48_aLZ(tDpeQ>xAoCEr*yg-$KS%={B%JK)^B!%z`B5U(3jZQ z!|XrOnBLO#$Ur|SK@3CiZ|RgSs$(CoJ&G8R8s!{X|#T~j;=$a#_2jLV@fqn z>7K8`DUurKiHu+*ubA8Vu|VA=RRA^Zank@##x%N$x7oO##7{Ms^~=xix2!4yG{P&q z@39Zwc}H)^_{k^iJgxcji2BXLng<&lGA-x&@yb8V!fr=WFP*a`KkbAXmZ&PWg$AA;^kdVTiK8GBeEru~+lakh}q? 
zM#-lsiadzlRG#rpKjE#2z}vHYWbT9SsXr;kB008w5JnpW{I?v49F?)~a#Y5H$BznD zwLUNuH$m`&U8JT)4H@>~BD=-l*A8Kn=fn2U{UW@Fo`6fA?$KQKWw0y;49WjCrB>{B z{)Ct>Gk|zM_Q{IEo_ZD#odLJF3O>-i#MU{Wp^zhei)!LaD{FptVn!NP+VA z`g^RR5`Jk#jmeXatba>Sh~hILP?9!%S#C+(@+nKUiV8-C6t|5i`o_KyzK6=T+Q71x zsZ*EO39^T)n0+sX5Qv4lDb{%4*E*!Z2&AM$Ktr8{bJe`^&>hUKS5Qv%Vkxdg@#>^> zB~_Pv3|Mbd<8ODYD=)S9y)Z&#b-qfzE(Cg3HBd-({5}NTF&!z}MZhnu*JF*aZ@jX1 z;Vw;lvu@1g8EovbJI9;VoiJnI(Xj`<%jiFFf_KXJG3f&*^yxjZd<&=!O-}8~V-+`T z7T31i5m$nGvxpsEukcU+_L%Y1^4qlyo|zTwqdAevl?C1DnX0d zs;M=eq7{S|ZA7&#r&7W=44NojLGV)}#EpfN$PFwc{H2coY)!f~9l_+{#nB?elj(=C zf~Kg1Rx!B}Jqsw8Y0-^^l*?9Hx~FA!dYzBF@R(fl_4_NTp-An48{H^3h7W(Rm zpYDH{{`Hy&w*Ax5qw>dOuU#+^y!dJG+yqAQ#MfJ0&A#$l9?11l; z-g|IrxLdK*Ce<8)RScaf^9A0)Vcd}zpTno0)A%gl5R0bnKSm*XV}OtpOBrg6 z)u({Q`^E&U6GjO;MIWkiEx%d&7+ z^gm{s0}V7EYfX_&yD73M4P}E#8pDwkVSuzz`$ED~?3RwbR53v&aQYxvl(jkMgy+J& zKhPLv&ZZ-%spNet?dmP@B>NzDRvqt);5`kCezYHjFQWqDegm{99Z`dh=#_lj+Y&i2 z#-hdQ>5s7~W}!mch@LC(LV$&soU}xrrleEw4%l3POi}uK6!lHUL#nhH2|gUI1W#*RVF#)r~S^R?vZ_ip>l+Avg#5kBh|u z1d$bV0J0}jE0smsBK($fay;vM^5jg;zVhA!c;fzdeDPv__N=%Al3T<_cxOk7%MV~X zf0KLi-1*ClILAs9zNMPbk;uIW@{QQ1wOOM1mc!}ifZmt*R3$vVBnc4@FF5o1>Oh{K71iAb#&2DJYOAt!h=#8{h>dvOoxAv z{2Q%Qf%iw)w)_1X|Kgbz*O~MH8eS*Ac!CTsr(oHsZi{)5@44#F)Zoc+zdXL1B z+OK#;TSu3+bSa{b?4e5vT^e#WlGI1DssP=2$hn$`fb<}%W^bNrRFr?RFhV># za~sqO32hMGq&c#T^dba$k6fpn4eZX7sWO3XEv~X3mNX%)MbO0Sk|xM^Ojr`1wFsZ_ zH2M5?vC45@zW*tmR_v$c^K0}=Ht_hZsXP_GKP zAMyuh{Qbvm1EB|3#~PHg4c1CZU$V(WHRj?^E5ojtJc7hOCl&CO{w4=s|;ac$h9BDpI^+nKK8`wNpm)BS&PE4 zYo~~q;M-^3{eIA~?2#*%j9;@b2UI>tj8Q9Nx1v!IsHCq_y03JfVQ2sEgDzug9*aTC z>>=oxj~O(fDV0***-AeqMt=OgxO;QPm5KRlr!06&oLdif##j;R`ttO9xT5_*U395TYWltE494*ysndX;QR4ObZCI~(+}^bnszU1s-AxitH;Rt zwP-aZ@OQso!|UdV zbt5FM28MbW!zJa<97i`W-aw=*&vO$NEC(1;@v0AS3xPGqDLbyppPlmHk^2JodWnB4cPQwIlo zc+WO-a#XeP-ttvApKxu?A8m$SKk*Ge`|^g@m%TB2YkNCNjG#&0bl&=5bkzu6g7Vk7qP!&=<#Hw{m z#RUYfhWuLi^L2as#-nFp%K1?>6!q`3;%Lb0WB7!%eA4uXYuTl9-={Yfh3(pQ;~#ns zU+sK&npa#2V67XCUo7>ir;5H-zsGq?MlOAbX^ztMVn|v8B598HXwG1Az-UpGr5`3L 
z#R9#8C&dKj(-Om}tR3>K9lqIM7eTjx#*qW+C!P7KIV-lzn)dVuzbTp1Us$u8z0$H{kLAkN z+%+w0X{1NIEUqYj0Y4CL>!rm>P2S&y%Cd>kpx%1ma@Q7)hR zs&6xKZ~L;|?=@;ZYIv=ki>5BXJSK>5>+7Z^nTxSe#q)^wIr=Qb2)S)C z{S9J#WFFWJYzmPeb<=VpW5qI$gm>8WAN~?Qu;kB&b~<*HtxRt{s6)_zRQ?$|l*2b@ z%asA`XKZMZcK*d>z0W_}eDsv~nXm4ny?DOpCub&3Q-ZCZW;1nlu_XG&5x~q~Bu2oL zYz*_6dPGT&vj}djY;c^UHKa#zF4NqpYXRC4ks|8jAP(+yqN19bETYXtq?Mjs+Ggjd zykS1Lhw{U_PwqYV@0!vNcl8?m!I2Y}iEZ2wpOxnM`!KtPK#Z3`!&3Z}G+% zooS?0@H@=mb~DcoF$fdKfZ=FXt+mJ)a)Ur%VRrr;{^H4zK%lbJNy*An;;<==e^1x8 zLnjemjI5#Xp~uF*y_Y?j$RFQp!oi)|g?4$9SAI9)P#*2s_M+R)5!f?y^VY&+=%DKy z(4sF|8rT?)aydnRT`6QUn7mLL3UuPD&@71%g5^`RU&}-9?pdBJ6S~CW;l7OWS>?$x zDSr_++B$kiTe=j{JND2e1($sx&>oi0LycJ}HPrXt$PD}Me$HN(Hq})4Bx+V*QNG(6MhuGs|OEb6~;pQrcCRKwia51 zubK(byM?V9x(-Fw%_bBS9#dw5R?Zh@v!gzFa;O9lO0+#e*x~u`4>_1~&s*Z&n|v87 zvH8a9^=EC|btT!hh*hl2Zsyv|c@D;OGUfkQQ z+w610F!FvyKcRk18=ya%XD*Qu49DkT~`H_#z# z8|eZx0sd02t~^{T&(u@9Z;0QP4dfCQ%HZ>aWDYp%i6-`y+-l^He4PGQkD)LA^y;;=(hA( z&?qAx9i<_Z{L<1;45u55~A0{=6bkY87;Os#LX_pNCn3eg6G6rMHn?NUb1B%0eBM zRuHD-M$MH()jSdKgMmn4KU3NkrXi&cRpxah#6fvaq-3^ANY?VBPocKU{*|orMfa-r zPc9H^#6zGS!^h8JiOjL|ulXlWF4_9d?oFposmNIqt9MY7KqL=m{3@11m&(rMB<31u{TDay46M8+@`c^p{dJQ zlL+xHd%4@Bj`e#Ure96uu{;R1@g4A5Kko4+K2KesRJ1i?d#>4D{GbuN=M6s3eolXG zhOK}9Mr4@;i6P1cj8}ob3|6F_E7f!ofqNky!NsADgI0V5c&*KX2lr48^>&0c&ssWrbpQA8JvG!w_JV^fSL^pk zUQgd+3zX?v1Yiw=riW;b!?9ve59J{6g^|s(7cb84dluhQNqo!d+xFvoV*TTxBwBlM z=Vv${P2UpkSTLxY;^`y4ZIQKPY~Owoz0nq<86Zaklr4h3a%(UFxfjqe(U+>n;MP64 z!?tvBR`W*h^nRVzbD;VZKa90VVlx8ZZ)7vrb8;^lsF8dYzAcH(EJe@HWDO-nR1zQY zzP7(H)==A1S_v6xpiCG$tUy%E`q!AruZ^x0(iZoxLbxMJUk+m;pJO`ty~Rh(=dAF& zHT0uK@^;82tPLVYY9&x?NvbUPFLPOHNd_l*JnREdD6<&Es+g;3lDtPGCjh z-!zB0Jc?ITF5m=5X(fUw5yJ-Dk-LP+IME@>R0t4i@7#>;-9`?7wMT}czLGhtN8&5P zGddHcEGzm;NwHl5?|j|Z!g%5e+nP;AOq5)h$4rw2}0zMr9K15jW=WH+8j%fVl z_QYMe*M7jod7Y8fqXO+z7p3DRiEOa@$B_K%4`Wl;R59aVc7*($ovm zT`5INDl1c&flx-?ay7O1T*5(7)AX>K%l&kLyQa(C2w&jJd%^S)^shF>4{LFG-oCA1$t(&b<;X=&CL$b9cFQB5{P4Y|)Y&>cw{_c`>D#tuuW0*XPWBFO 
z(AMcQwr-x0y@L%J$j=Vk+qq@)POr3hp$ogvxdq*8{>sB9om;-}N~f01JF|Y%w@;X` zZQcvhw~rscb)GV5`i>p5o4>YW>%7A9P1KQ13hT7(*QaC4wtf3_XxX-3|Ce#EZ+re@ zn||pXTeRuluRZPcP}>R~r|idmxonUKz_Qxq{t$v6d75d6^u#c}KwM+V3wRRfc19SR ziO+Sh+TbEtQ(I3)vCh;gzAe3IQ}$>Q2V#)VM!i%DT(5?ja?;gj`k!TQRAsPShh_x-{CZFqTSkj6^931aq>6_j8!<#l9%|^(I6Z#8vjH-kKeQBBXZtB zD`Co1wOBOLw`DkZWV|oZ2T+&n2oF&2!oVMwD0aAFF4*t5P*@q*OR8k?Af_c6i0@Dq z46nY!zH`!CaYmG6-+6|4KUCr{nr`5I1JMzpifyG9Z_-UHv}_oPS{1$fXBBHEhZVC% zAvqanBvP*;9ox7@KpRXs5E2m^krJWw$SYl(@Ihyx0`&{Zi!(*>kd|1f04D**4f`4& z74D380;&K-H!T^N@OeZ4Vk=h%E2kKp@+nR8PooNg@5melOp}ZHT*k)F!iG2g}qt*-k;VxIbgqt-9ippvV){c73ZqX9-%)SH{ zB#pj=7M)ivp&`#KnQeYhA;~j;Fb$pvvz&$4H8t3U6PqY5q(F-gm-=#iiaAUMHwKYe zg%r||O)w%Xl&QaYQd%fFxjQ9T6g5H!pMcOYcq0W{?c#jx#tF4pi)NFjE(*VW_MC@J zIRA6_qWtp@(@)Hs_xg+r%1&?Z#*IrY4_`i)uRC~@d(rmm!~t}ud?1!A$jM#E!6&vA z-3f4Eg_3|jBN_LK+ELzu>g*H|Cz?x!|GNexP(7Q_p03}3_}kMmVF=fX1#}-Njks2m z*C*sP)wjYH`^-X@MjEshz$KE!P~a%+jHtQEF-P$=GY}o?3jGUuLV$}%*&(ZmK;Hrl zLlz>#5clCo!F|-&!FwRv@E(j5_d)Hr52=a!keaw(ReswO1zHV#9Qf**1zMW^0N+%* zKzmv~AR5{A90145?1&azM?XMT;R#$ViS8YYdoXIAP>**&%KAoOyzsLZQeP>Nj~+2 zwOSq$A;C6Ji!gafEhkq>HDYlIf%2>+SS13yEhcXpoy<~TX)YX2y2b)`16dFo8=Ddf zSrBKE1<*+W$pKgbhtwL;g=1bKP!b@AeY~tR%KZ9@B7pfv#49g}Y3jbsqx*-CAAe7L z?a=VA1gr4p;Mc>44Sx&toh7ERX}rR_mn*K1fo)rA@|-Em!D3@KCR{i&We#%3=nNjg z87vFmOaeIA5q%%!ZW*lJNDG2#YK|0Xl`6|DA!u@$mDq>_wo0x_ag{JVQxc8NfV9jC z^m+wXg}4edeUsFSFF>}MmKhI6TUFPwcNPB5w?o8y z_PpvH#@}q{-NCx-@;>A(JFFGkC`(DHk@ITK-5HrVHLK_R%?{RjHKz;vwi8iKRhY+w za*VbO($~$RMEF?|B)!RdMRq>Ww{pxh!AC?PCW|cjU{abbzN8?Tmw-toU}8@2>;x8( zz$lJWC%z6ETj8Rdztbr6+>^Pb|Gv(C{@VKsyFX=hg!kx^Jgmmw;&zI%#$NiRF>AGb z-czOcpebxf_qE3YWEaV}qF>Z#%p=COSf7V&=V@7-ed zIBzX}K3@EF^~`BjfeovOl7C#DSJF19wsEGuR~GBpABJ}*QsOyMEE)qy58?=$QUbbJ ziP#bV&6&rnOFHZj1QfOyQIgo=vx2s8qxBy$6n&lZ;(4LSJAM)Wc-bG(ZT$Wp z;Ja-_9_zYlL$MrXI-4}PFfXA(Ku?^)4chbZSYbQ-uJ-0=Z#;w~ne&$8y z+R7Z;wu-_Xa}7IFI0o^vgVdPei?_{rA$#W=8TDHCf4N1QelOPZ!pxMm=GJ)*zg_vK zwAVm8K<_An;gyO)#B6{TrlTyuYYfbUBqRfCVE9)wM=2?mA0Z?NEJ$f{_9W;E%F&}F 
zV~6jl>G9Gmq0PdoOGVCpMZ_(0^cItJ66}dAx=T&xT^AM z=;6sAl4J|T7!NGD(G~GFe?`7HBQ)wH)Qg+r{}jyyXj>jDwm>NvBHZ*4q0(~254HHj zI1rbX6i4(yXDBV+PXy!{(y4$z_~eR!RgN=;o)M|ew@_PefOkwjt9#h9dTsuuo}D`M zU_Co_=qZl8@7?3Mz&jjds~7TTRvkOMsmGf9!yD}BLk9Qi*L%p2J`Y!^!yhg|Ty2p$ zg1E*2B}c6bu2BlPbi?%nBrRNH1^gyE86PqzgI6@LUJRL1oNR$4={1GPCjjIMV0z46 zf{C&7L5APU&7@=wBKrrz8S{k_OEU@!L&qu@9>hT6m7DWx&F`AIcyVS|QF3XwWh~ns zFUGPtVjM3kMzBTR+w472m%aBA#-0o9Y$;+#RN1Sa#`Vfx(7TPAUKW3$GzCaYi!LFP zO`=osLZnYlFMooVO<3_mEkb`2m_uaovxJzyzHn64Ac{pSK0cHbF$U*Cd}xvydGPQX zcVAz8Z^q28XDD9VxRs}NiN!e+dHGSVj$Fgo(nTl@I`7ZL&x%9CCn{AZil11_2=bP6 zDEiC3*S^Y@%+3^j#%JMnne97>At$e-gu@HA_70hEZXzD0jI+S~Wpl6fppU(4t- zY_sn(2=E)9F~a%sGkx%x7WTLBnRr_OUnD;RjJ^Dw9mSt9z+3V&T`)GU{7ix^*7un> z-)CMe{!H=MurGrVjjV~D%H^O1y{bj%9hKq4NC1cSrAHW1DD+LCI2i1HO|i*)I5Osd zJ6MTXX+#vw0!JsU|4BkL0?;V2=;0h&L}5Rho*;z%fio`|DD4J4w$uwAw58W;t6Wcw z&S6d#JN_p6Fy3RfZ|1LCH+SJWwfuTTw0?g6wF&ieB5H^>VtCCX;?vD6;qTxZ%$0k1 zy=%wC``4cd={gu1!uFzS>bE#IPVg5B$P~qI>quuYeVZSr29adS>xMfW)}z@9g6@mM#Gt~aF-CDZrVK$P z)|n4i^4{KcYT3fGycuuoZJE1>zt1l(&h<9IFK*-Wl%EjSQE+zT;N|%!^K6$qQ$b># zCn-M_9#x*>^JFZiAw+U6MjBvyMpJyT93S%Apd0yher>}C`UC4T+0-;%SsFMkp4VVI zk9xma@Rx_xXXVvp$N?FR^j^i54ur}DobK|d1J=McLUTUzEKv&hEv#r8stcZQyC+aq?DwWlkz2B_#6?k%@*2yM#LaRpmv(`!qi)H-uR{6OLrE}xjAj>t=Vt<{8GDLmwd<~@3-4B zd!(mU$uc9Cw41fX{?C?~qmHBnMvhtBZuVv#vJ~;QLwS1-EMm5tGE13l$-%vO9&z%| zpu8JLHYHc>bE5YRPr%!^j&6&s+WT~`n}^WH#4TF!g{UnPVQZ*yU%ow2k39H>#Fm?Z z@Q5Yqfgp$pVGHtA3se@D{m+4g)OCcme=?H?kK{8U$qA)UAVXZ2kd4FEmbLiWwIsc5ur%V zZJ0EY=Rip6wNel%P;RL0@Y#yCQU1?KQbAcF&&Y?dbLAMOxKgr%I{0bVL{OR+%DN+TaiqllO-QLTir4CfPgDy%t*S64T2J7eUMZ_@+l4zMWTgT~%a z)H00pE&M%Puz=NFuz*isCq+Ycl6JOxQBU@Y?N{)@I8zLnKB%VbYoxYQ;oFwqRpTjt z?Dh1Z<~*0I zJqeA+;+)^P^WxFWov~9!j2ra%=e$LJzOr*s_xRH1>ArqBWSsMwc2xMUG5N*!Zr}Fo z+{sSl^<&jM_CDd4hhTIV?AYCho_SE2v|$Q;*2E~u=e$lIr(7vxoR)Q$CV;WJayrHX zDUyr_RbeSqH6B#KgDSM{G|>b+pavK6fiyzsL7Xcu-oywJ3rLrEWM8OX)W3HG$#7rB^1wmqBlWEt zJe0Oh*(tYA-#@uBl@W84gk2kRtc+<@rkMa&ZAOzP$(h7U&m7LlBU1u(!!J}> zR_BX`u%HOV<0t9cQ3~o6&(bJ?#_X|7H>|jZ(lIL)&K07%fW7lO@ 
z5@U82aJ}E_15YE|wTYJQU*uXa$7FDrg5lG&fXx9#aLc5SN8&CBP9-HLSB#KGk$&zd zNmX559CbN;`kDS^4uYWfuJ3WZ>v>DKWf6-l?_{4p?1htV)Fcq9dcAw>P)_a!;>L)z7c;oTKHRx(>mvEjc`UQOA*EIyb97 zD0A|QFAneg!gJ3*+iAipZ|v#5xmS;29bahGzCnl4?PeZ|8UFI*&1c_jZ39p2CPq4c zvA>OYNi^(eF7A>Yla!IL$ zD-dtELW9M%fxJE|ug&DOrM$M2*H`4Vo4odx*FN$(KwgK*Yo@%8lh;Y|nkBC@I_#O7{X;BMw}_bPCAFK~A*aJMSRYias;*8=ye z1@3f^Fv|V9+-)!kSKdpepF^_rSkuFCE;klyqRTzAz&*+3#*$5Sx#twPvs~`^1@3H@ zdjU9{?h-^w0_ZR@DlC*-VZiz0l0ZfHLB`}11G%ChwC+7j1+n8{D5?#?ebCFDhxHg(rS|<(BTm^XpUt>8jHR8j-(j^g3cF7o zkbZ$hdb_a*Z+DnaC5rDK=`prmgC2#ykC6YI6*J^N_Hp`z@vn~QIeGl6VlPi@(Yixi zP_rjivF1(Nv}u}27dVM$wdwIv`);+X7oKDF&yN^!UYvHsvI?WOZyznb-d=cMd;6DF zrR8Y(?|xnV`;}Aes>Fzo3a36OjJQ8lzkw#&-TR62O28;-^TwfKM`hc~dqYEAYPG#; zXn9;qghr{=D13|9ILwzA5I5>20%}@5MyW=AUtKwjclq3XK{n}0f?X3EBk#q++z3?c zNL!O-v9Wnh1Yz_YMSbijU=S#POMhw^<#=J^!speHm`W|XZ+&y|dVYb|tM+u=9^F#T zpFiJ9Rk^Ae-+%6v!Rk7u6DLsXA*Ds4hE! zV2MN>zW`HyuCxJR(o1=5sDF78rVt}9(843AsFkJ!%SzdVj5EECLq#SC(r9GuKB7i6 zRE3*5JcP&do!;%N`mja~TD`DlD+^dTq=TC+8p@*kH+|}v7oQ%vENl3{A#LBl_$ESO z{#A(pN~yLkaHP#)3{1KWAUrhHE`x?D3agri!0GoB5aUTqWxuYu%KDV%U7nschP1VI zMSt-%m^YAiw&t3mck+crX;cD~(%JYK!y!RZ*=72E@DtODJbE6Jsq$(BNf*8*cfq{X zY}LF4xqQvsnd-b5Dr>s&?Op2^ZhCvy8s~|9d*9iQF?B3@psZBaz~YOubuej4MomqB zXo&0GG*RdU7#35o8%BsFCjx$?HL8RM|d6E29znyQt|84%6 zF9hH!RT9{;D{ZI8%osE?z_*;R=Q`Q=wvC$1Si5NGuz5HYy^4NQ zc4O=jhyql%_0vZ$eZI!%{ZhYbfxBvocB?89AYySbCq`;YRf6$p!DXuw`-To+iWI@v zHA{54+>+Tj5cR;hkpej`Qt=6JVtoGlxVyMd$MjL(iy1$RNblI|Qghv=pbq-5 zX)&XFygEGNSf z{nRKY)CHi*dKz<5c7};KjR_mX=|&jR1V-0vb~02ke0b%-W|b4(@89K7-e$^FwbH{I z%H0)2pChrJht;K6&p7y}_1=o)xib9I@<HLdjOqObFK!- zACq?!!$CybL9PuFB9c(jT()=xdUBz5U(Al*zQRTUB&Ad7b>opCtIgRzIfLd44rtBlR zM8+)q1>aD@%Di4qCd+X-;D{nZM z&bONKZQi?yeMTL+a_iL2AA_{uY3Z30=8qcGqzZqIx7;Xh)wsM*yPTKPtyA`h!C6(y zVOMIv68#4Apbp^ewBQigb{dQ>5bWM>ej4*JoQoRMq2tcl|Sk*RI)pa;?Nw!5=oT*2m zBnM*@M#_@Lf+a#0ahKy%j%^P+j!JF&Zn0lc$ZcONOQ9QIjW&>m*^iD1BDRZNF?Y8K zPm~>Al>c~ExuTzxX(FXhn@d>Qg#Idgp%}meoe7E<=XZ^Z;^25oa zKYFGQ&CC*aM(|aAI<)WAt@ZqUdserdli29e%KW{{+xAKC_AdEQ+F&QnWA0@jw*pQ1 
zDw0PUbN9lV4(KuZ)d@56 z|9bbpr+rq5LwxFVw&vA&jb9#=Ib`#P`ES1T_6G5T?!~k5HR@t;ipKw$QyAmaRGWlC zkQgf_XN@cLtQ2K-h%&w=U+iZ;MJs#ytV7s_+xmbGc494puo{qMay6jqT4kBBCKg#% z{3E0rn=-kuh2ii{bLf^RlU6z^*_BWcl_0Qjp~}vy7tVdgubsbeMalo^$B83806gsv$1sRbgj#ux$Q0{x%LE3?=eVhmexLwq8x-ay8{sOw11-RnK( z-Omn-ro0u0`o=I%oBel;s5EklgTqJ{(+4KE+8B%Uxflz&3A)JojD{veOnYVChqPQR}QkIc8!#Ag|q6n zQ~Es|rMzjk7Y@N7F7F!}+MstgT##0OK7LIG z_@EDX#R!iWrF^2?Ei5Iq0cLv+C;W4q@I^~APc@7T*^*~)<3xd_r$5*w= zjTkS*d}wlVH~zI`^ooIf(?V3qBM|s7EhDm#Wt7fZH_}*HX&V!`%_o}@cvta*hwb>} z_D?MCjQ5+r$IO{E^*8d;s|)gXeA7k5l;AK|`wqv5yA;({U%~o^LA!0M1?U);0Nu=^ zaap8}q%5LYB|z#2kJJN)Qf>-DVRUHP6Xhgy0BY<=bO5z}BC#VOEfAz?$ISj$CyV>F zn|&j`&H8@%k1XQguGPzSsc+}5-oYzv`Lo&jjI%)~VIT19cBae~ABg*oGnX%VI=*xD zqqDyCE{FA|iYslk5_-iII8aLY*4uLY46RiwsaI<+X<1?t6Q)=joe^j(y2hAj0Jhk3 z9`@1ufBg8V_?|^io;`b<_>%gvste9;+i@&+aNl0zsMZQB2DPh{TIEP;={4jbwG3p9 zg_D}4mf7IJa7-9T498`Y>*xZc)fVN{rMJ%sg6hh5zW-L-W>Me%z{2>!U8|Pwap$bw z&MP5L^AfAcoX?`#TrE*hWIuEH)6-4fy@J(4P8O)OGgdFq)>J1?$&(E7w6GCFj!P;w zhOBWrv3AWVZ%B&Mnh_R9?R?0)$>ZTY4k<$mmw}%wRfZ7{!7tj!;TMC!&zaOQ^&htI zFUd-bQ5gJ7{;)xR@`3-vFnKn7&DDJ;g^Fn-6c8E)h8jk4Zz8(u&iwsQm>4-*j0!u1 zA&pQLJsic};1Pvgm5ttCMFz$tN2nm*6Mm5@|K-S<&!#T8G41Dg^THxePLA86By1S+6}9UwX(DKN87mwG(eY{Azep0h8x zbD?-$UEuv>F#SLcE0EQf$5$s%0My0+PC3DtjqEA8*yyThd@j z!!KZwDwdOCd_^%QB~}z@BP`-%#K+2Ln@}*@Y>CJpBjH2!6hM?7?^__s?jH7s2*yfz zxq=Zu$5hjBS}WMnwGt)^&hp!SlCL0vl1LKKf-2AtOUH>-*)*%<=(!$UjBO*R6mi33 z*q<;R&?uZ#aCwO9q(Sjh)0+H{^NaX`vyy+j_eZ%yNq?=|;#q&-C7kR_%iFhSRSUZU zjh=jD|FsX#b~tvW-5w6qdd_1b60KUD4P@-C^{V5-{)6W|1AFxtsH#A^-K}^bBR4nd^JWz% zOgeYx{ezj~7R{Z6nZRyPmViQ{Y{M+LZHxKfXQ#GO61y0{j0_+>I3W{dsf-Xply2$% zmk)v|WJ#NAmk~@zIbfn;{YR1$pR#WN%!q(tgB=2a<3FmidC7Z9eEbPIcmnkNi%0xI zH`m3-XL)Ph$UA?6^ZD_ge?Gp|1U}lwA(WXIz1^0oF|^(`Yyb;G^^-a1*+kwLgQRC= zruUeKKP0^q-^BfTx*`!UTy#IBs;hJ zndq|O9)C_l0;?b z;KMLgks&&>db_a7_Wz=#C`x5r-V^s!rf5q_sqBMY-ifx>_n$uLb;IV5~%#i4; zIOPH&eoe*|Sy|W5V#(OKGvY*aS#<$yslnX=pH!%`g3<~*Mc9;*sBEUnjBPM0I#{?G zUMUNYHspg@0-))ibcmpe&2f~Zv7AV_yiK4h+De!x_zR=kR)v2mLC<-|@j1`Yy<9da 
zm$iWZQrDWE$Jm^}B`fphD216#99iy-`a323V4w7ex1AJ5AIQEj*qrBo>#9H!tqUVam>>xX^ zc!lUmGz&e=f!_}W&xkRwDUUY=LUfOP2;!aSajDw{D7D~_?B49UL>SOawg}6DAC@OJ z5vih+w&dZmbRbxS%Z>y!JF?b*f>&JMF_-xHYtN73Pv*Mq5do~>%FRmcExEZ{)X1O* z7d|Kq8a_P8d;a~xpt0Wl;%X=#8Mr)m#hZcNg(XnTo6&641DH(&<*k^|fN}v1hA!O$ z#sdqhH{APLm?Di(ASe;?g3I1qtTBKlQYjRg1`<}FaZvI~YAEKB%D-n39_5c~`PY84 z=d_@=oqEyIy%NXWx+~%SxAtj*Wj3&FsU! zG_>DdN_|6fV?P92gXZ;&QIR_8{>JK()%EVH*+EK>Uf>bolspzh0=-GQfI5mm{CSI; z!R;QlB7sja-Bdn2;p}hkxNET{Vz#|B@UZ=4>C*Q<`|{|#DfdrZwy+Xzh{g8b&U8WuufM`^@9WEC&HR9ke1DX-t-k ze6Jx(X0}J`!~EbRAZ;_r3^yx8gczZXRLl+SLgmVZPJQ*7eYQO?IpK=Z`#M?Y4!){Z zMj%Nvf8}VjlgJ!9ecIh()Y>=9zzO@dAAc(dThM2$6aB^!rDdv&{g_Zw=<{m@*Oj z#(Qq*KZfiX`00Y@va~~=SC6#wozF_!wh)IJ@36N}k|c)C)d@fx?h#FqKms2KXx;+T#=GiZa?h&sGD}wyEwW(7MnQI9L3FD~efO&`_Sk0! z671J#ZS5n0-|VBi*RFZfBxHF}?HdW>bM4L6*T6WL&#`ogF|wq|w}=CmDChIvD9-g_ zTposcKqoV$oJqYMF-92u9>ImqCD?}4jglNFpk+D;icXFXwd~n5oD>MpuRL8FYYgT;Kg8Bj;z6Di2CnqEZFAgmwWN4Z3@S)HVMK8yn}|{+Xsh=Lm;*{ z$)FaO?*S=d7H;!FPPeVYD=UYJhmP3o#rMaECt&LlH|&XS1%5bHtq#0Piz3#adEAsI zn%UWPYWi5Cni>x*Xg3B-=a?)^w>nhfR7_k`-rZ_Buy3NYpHA2h+8gaJgKGofvd>J| zQo?A8EZziQlxlR32v5w&cOKNN+lx3_m1-VA^v)2tbvcY{6L66Lc_M!~M`Zlf9@wJq z#@xOcp_|X^)x^I2klAK}`Pb8z_IfNq#61x_uTx+-aG4DmM)AA^^tFt4x^VqlsjmgS zcVCkfBMp`>B%(7EvcCHkTmaM;vc75&Nh3$t3*?O&fg?8#hK09KQUw zl=pcl{(Kq8!$-ZVyoL)p7{i>E!G?5O9qqvSdgqxww?x8Ps+pX+!%FCSo>K*n~ zq*9^?breous4jNzeyi;lNR7lPjM@~6Uy-v4nj5{0=W zICJqG&1x=@I8K-%s|LvX%t@aeht1E(W0~7Jm_vQA;z)6*Bn`suD|H9P8uF<3Zu=pn zItZf=teFOE&D`&^W_(F(4PZhxASaE{fI7(fPO13x26!ZW>?`0w-aa_KIG#)yx~!7_ zJ;v)rCfnE(MTjKx*D(ocvOsZ^Mocp@X^br7WbtMaR>r)U^HzoJi^NO8)r%(2ORG%( zZQmCnH8|n31^cn1Cr%r^vvSe4*Ty}#^Pqj?M9JzuX1=?VXdgQs%EbIAhB+@{$rz@` zlJRYApJJyL!Y7Ea>B;n(gwnja+Xp8WC!)Ra3Y6%{e-H4@v|0odtLTq_vL8-e!qcIK z&|N4Kez=eA?`wZhS@}Vd$oR0P0A9m?*w6;;_@6}`e+&=Vo{(*)c@N=^+DLjPch=wv zI(Z)Z_K*^5JYA*KMp9*)yVQIy2S!8!xmszr`E1>H(|gAp zepCFV66l@#m1tleJ8mZCF1ur8#6*nkh@BH?v)vMtAO{nGFKMxfxFJJ0eIQ8=`ed;> zvau2z42Ssj%6@nRkQ@gOli$v>8y)p|`xgS>(qTc1IXIKA1T9jG2P5#60&87t?b|Cp2bp3X<|IAo#Q1-?aXjAEh!MxBkN!#f 
zXUH0td65`hw*F3gjH7j#SKHmyWTfIn>q%N6aaD#fF_OT0K(43nK=p|`-vrq3VA+>S zvTqWK9kFT#t(pP%f%9t&xuTGVn&N5#kvM5v)TYHj%>iyY=D@7J#aVRk`($S^{ixjz z%A!5yq9^+z0Qu+_Ur2pX{QB;@q(5Q?&2X+{;$KO|l!)PpjQeKbGbz+2;U!QU2|pPn zT}{G%!VGZp@%F)S#c?zP`ZdPKVg9C!e|8VDS$b!cz0FJ*A|8^nAT|p8vPkQ^l<)9; zu)nn&b2!&n6v@bM0}RNyt8U!$u@Rp$%0Tc5B&A^Bwof{4pc;|A?Kw!`o${L14+nkZFMA?!@h$DOnxhX@e^x5bXc=bq}w` zy>zdiI3OlJ`raEI+I}wfKl}VUI|h!AKK|*BTZg|M%;?lvTq;`C7Xfir*=uj+eDh^o zL|mHpy|C=q&*$48M$#a6_Dy_1G(LaxYWp>44~WEZ24Ai?2(}HIxkh*U6X-!Oq3F&b z@Ifwi;~4NkiZL>R&4iign}2=bx5E5Gc5VV~x@sz> z^gWTj+kGqP{Pq$!ofeoqq>@Pa7P;V91>dUf3I`9CpWoVYk5;$VqtpwOV_ta7ELy`z9nD|1qH{i~_a^ z*p{W+GXHfppE_Rnd?G<*$;+3JPU~?yAurs$EYoqw&8~cEu{28-ErCg3cR5cv;tHbs zp*xweDrQ`o-1eT^c03nI*5Ml@>B3A7-_wPe0c42Lotc5hV)nc1o?krZ_TbEKANgIRoZkA% zbC=KTdqL;t7tVh+WG>#CBAWg);q~Z@xStlWqh z4utl5CuUd&-h5aY?9C@DBg>f~Pf9JmGRFVqy>i|J59H^28nNLN& z#XcNwFG<-m=joTuIrpZr8;9O8e9&;@b#qOPlOdM@kNkK~j$G1v=I*CZVAT&bxYa-qkCvN9P^=8dKqMS(8ENj})*eNF%c!%h zC)USD$Jg32@3BpB=InUnCRPuU+YMjXUcM`E&ug2Xc>$}XvPbfaNTiYH&MK~&R@|mV zb*K^h*h6Rw5<+K3{1^}^!oMioE%Lj?QxQW~Q6ww`FcEI3Fg>XzRP*ooh=yX-`m!cd zE(+C>gt%{k$tC3oe$+)DT)~kLWGOXwl^QTH!b^w-X6AqvG9?8{wd|_w%Su5`-9md* zK+LyC)@m*@Q@Um>UmI9eW_nUn%=Hroq)Z&%bJ3!-5@9X>>oTc^TvrWqls8#4;4#6v!5F-X#C;6iFZxh zcQ|`NL;Lu|yPz15Zy`*TW~xKmrvIcvo1#$**zYaW2cOl~)Je%=dEP`tiop_~2^vqC z)TC{@VWybm{&bVoU}OEuHf?!LNV~{wLJn#8ejp-hXw#;`%P>~RhbqvfZB+E2c~124 z7eu=kUn*Vv^6G&%Ts!cVYp%Y;}JhwD>zUan9X5W45*S>At`16EYhu(|*(FN6IRPNaU?|5Wklo{} zIKx_S#aTgW*z7xE#4`KgkeImAo_-`%oc-X24Y!*iXt6mBNecA`m7fL{4UC}@2iCO} z4$8Qq*sc}tmg0vKxljz{d-YtDBEc|MqrpQV%lFdVzmXiKCM8_H7gi|>5GDn66rIDx zZN(?{>N*$oo;rWUjEPT&mehLSqRX!A-K&$BCLW)@vC+d5Cp|X#wjq7Gce&zh(C>O+ zzA4ZczOQWDMZsH~6&i)RI%3Fh6)q;8E|nSXQ|d<9!2O8jM@hB^PweIng`}Lxyz_Rs z=2@xsiLA@Uj-R=F`kbdfd1rFL6{**Bz3|GOfyUF!kFI#^o^emDD=m2Mi=aKKb(5yo zURN~fa!|V?)_g6f9Wn|e#_T{)7^~e%%82D6gW^(E$;8E_=30C8Ix)cRWR5U*MlUs= zco1^42-H}P-I5Mn?=0hfIXYYSYIL@zNrP@4;+DD1^LHF+eyn*6eVBJ7H_vXKkAFmB zXwSTzKH>JVEDs81bMn+gYG*aaFC3>8jod$$(jD2}&pbME=)-&_bj=oS0JT!5LUVdhRH4WF87-a2)`Oy(ohM0;&q 
ze3WlOP9SM0#l@UdM=#IfMD?&Y=0(S!oK99|N-&HJo4mGep|$w+(%Z(*Tm8VpO9S@Z zg$t+OH?UPv=T4XR7TqqIJ$d4awNGAX56uu$ZY}HC`I^4hUUCu29fdI;efJUq)ORn< zz-i=lQ{aU-S^^2t>E&r)dS;p!M93 zzzH3t!?N6*D~bzc85PH0Ma7GU$38gpvhG7}>2cZ6>k67TYtyD#i?*S$&;PB*EdzV? zym{cI&Dx&VJhyE-%p7NaU@K0s1l%6XyVvLNQPNMIOOYc9R9TK66+U;UFRPH)(sNi5 z`Q$}CoYQ?n^apdq>BE7>(IsYvnPlT0TI6-(9#WJ*Bc&u2odsK>(@J;aNr_D^)P-Et z-Gz!#T9Aj$lZBAB6FVTe1fIly%$Qj@$eNNe3RWcg{>#;tm{PhWdyy<67}%xu-IuPGMHzjo!|CHG!4yH$SgNmmcwld$Zqj8nhYA5hr) z#<7?8zQ{D!UUT2Q{nBojFySWb%c1D{*$-4rf!*fhk@NYcXv*?gebAzhEN^Y=8zyC$ zL=rgLfp(`StVM|@9(5IZn3;n+hsnv+B)za8klu`M=SfF;JW34$5013|vFnS^c8mCt zP0NRmDUX&sKJ(yZ4IdvdW5LfT!ESIOnJyl-V?Rbei+&+aYQpJN^s(6&Ag zZeuHYNTjZ`qS2Mza;#`J&QyC`Uwm6jo-A2*gdyVNtV*TigV8S0G~o9* z=$wHY&uXiB7{*VvpiQtUAteZYitJqw(buAUrrF2s zvqkI+Ds>6fJzXNU*oo0f71_;(gsz3?!etRtM%ZvtWH_AfbIU3Z8L5iicrLUrk0YBp zxKR)q!VG;V(A-F-;m#I!t~xb0VDGj6C|gtY`isw5^B($j`4y+_Lr4r?{obx_yWRm! zRBmcRDb_aP#Dw<3 ze{nxjOapwj9RuZ(SZa)rCrXXviewqAO5=`%mnm45ot00)vLji?@XMR-8;MKiU>WF{ z;+_mdZJ!~gtuL8bDL`$yFuDb6*?G-oi-mvrWKh2$<38Mr^8V1>_kM5x{@ut)U;b## zTfO9}$vJfw+*Y<#m}&Myw_dk<-gVck4?+T_V`mm)Heql=PdOLfo7JsM_Y@~JGhUYS z##h^p{1jUuhwiC_ahwwf^oagG>P2y6o%rB|=(S`=h8GmoyHIcxo*qFz0V>~&8S-xe$%G*cKdS;Sto2f*2gi^sYp$eYKyW)@}QEeN>Q#k}ge^P=-JGmlk1 z;-Wk#fcCz@J=%|Hn$ax(+QB{YA86gI&Ad%JCIs*AedopT1-Y4hM)WDjwLhhG7-fBG z^$RV4#Z!y4bJhMgh}=sqCNQ9lvpNd(6caS@YSaKlEYu8T#08)#Q1vlk|!Bk+Z#}|pFdp2x*J<#;cz4L@3p#qt+f5PugA_Tfs=WBSAlk)L2DLW}YscTYE(x4=8dm`KgVG2J- zGLpz8qOn9zS`rh5(sL6I^w_v(&jyk5!b{?!PV<6OXY}qYtS@j_geFb1>gQZ?bH84l zE)I8g@b3|NANbcdW-|UBy1|{qgzJi{Pcac9otj9*46)pUXO;=Ky=$!^+%WWfoYN3;hb)wbaNAQD^>=|kt9R0d z3ak%bi4!swI90^lS4ky!7YBFSZMX>U{~zfj>G5aM34yv)ux6^ei&cvr+P)s?G_+Vqu=_wIG+<$Fc%i&umnee$aAB3M3f?A-&}ce$`b>LjafqfS>}d2!oL z;Vy_Z_ciC4%Yx4b!r~2Vu|+t)_+(<`jMTOiIHD+{t%JD;w^D9#Zl%<4Y^4w!-%1I% z{uu9RhRfmcQatB5;P_sI>jASVxC3J~^}ds}A{vW&Ceu4R;+<=GEaW`^H)kyN1F5l! 
zFxGOy-)zn>_2m25n?qwku;cWhGA0_am-(2vfT3G>->en-dISe{G9!qJe~EjfX>AstP4l;RZCcb68dq}D zZ2a?9U>n>8{3AOP~FrmY8tx zp|Q_Wer57Yw~WOM51jLnxN^srA1;_{AO1&=&GxTzJGK!$mSEA?HcMrDBa`}O=$Z@_ z4P@0(t&vyL_Ndn6=k$fdNforxplO>HGDWd6RN&Sug7B(1zrxyLzxwvKN4~>NOTT>+ zEZ=PJezVJa_A3=;FzwVF(MklGpB85UGvb;-;F?s>|FVe1^e;z-VYXufu=7b_rez_- zv^N5d>=l2uK2Y)!HVs`pOj0w*ze-$_);>6BmHpYaP4=hj%rzos(-zTWwRm~d8~0Cm zeXJO>cj7C@%vayF%PT8I?>7tI5k)Ul+S~U9SHYh)-(K2&g>64BI>m^xlVcX+Pd(Mq z<%K({(P`bx!C!AG;(p#Uq`L;<*hLD*rpLuVKAL8(>P>2&>2!HZ#T9cP?p;IMF;3m!eq(zp?MKa8*@1O)*1tv40fli^6Klm8 zz1NsN)d%-g@1ge^_?2{uKq`B`Ks9d*Hi#-p5<`u#K!HE|38`sl2ksz8<>O6G&lJ)7 zYX$DV{oVzY6Anz?`RN{ehB;xH*n9W2?XMmANJ;U_BcESXvE8OzfMk1h;1kTgF8TZu zWbqHXD}bZd1p)U=M92Ke3iKLH@UMF^86tJFWG^-@>_Vz8Y*X9|QOL?X@I|2ii|Dw( zSbJ*ZGkag`tpn}WR&4)U<*v>gQTH z@R_2>=yR_qx4hSB0Z(Q*tAW_lARb~nWXn^Ux$GuL_c&8G!H@Rrsfso-54{oXY}RVH zu+)A|^t#K+IUR4FZVy`BX0Nb<)?9dsK`ZB?9c6_2a-rOCj;HYs!a0=4Nh+7uf`(zF z133XSwoR)>EyPG>+>U;RlRJ!F@aE6<(VvfQ>-gZo#V@a&IlQE)7#FC!$sX6uH0@tL zJ!XG4cFt}4*UaD1k-EzDws!<5lPZLrN}4xqNIn*6Wj&E?_*R_dBI^+j@_$5ERGBwK z8wA!{%}zcM?229;rZLU>yLlk=o{@<7I_{2Fw~YTcIt+qXu>bh-Mc7EVo;W|FYerE9 zY$UKW&fqM*o4A2T{{-hZ_IzxRQl10O_gjbQHE5;gft536u3XsrvYx%?4ertLp4Ls< zTr>F6He^=?w+_=qBC($2Qv%;GX&;H0$ zKY#ZFT0f=emQN{g?k}<*P7DWz#dG5);)(}x!*y3{C8S3Sbelvy9dj9L60|wdpv3Ds z3}$d{3UY=5LHd{PFo1f|#CsS7q3R$$gv-DZTVzll$9TFPWcJD!XhusXpC0sHi%hSFL&-MLBl3<6&?#SVL*HXyQN> zW2~g$Zj6P{IWp*(c{p8%6d<&9z>aF_z+zf~MkLK}IV|1~+m1E64L6^JjHN~mpAD#i z0ym>z?0Fd5u?m>>aC*~xV<+t!#Z4F?mmxyNnm20!bLV>)m%ay^HTFZ`56&ub?pgMw z6RF4wW1fu&S2naAuh|-@mrVv4lFGJc*ULvbS$UkkuMcfuVX7lb$fsqtaF5A!sBi(e zxcO|?S#x>fwX6HxGIrL?Yk$0Q$U*CQv1jjy*KV_a+OXqq&)###vjz7Jx%Ik-MP3*C zmuSfR<-noqZnbwLub#1aAFLMBn1EG&7&|rr^zI}nuGiRSus}KUHb;P|;?Y1M5L1jM zMgSRaO^kG3C%!`KVf!1qsr{7@^~5LmuPU-^NyOZ-{x`=1tp(%@w$pAXhu zgn7>eueC!oBn7AU?U4*ww-5gYRcSjl&pPf)bkM3dec%8E>9L_i?xB{zzu0-=iY#dH zXY?Z=8G#GlIhQs~b}S6jm4+RxcBA)B{Y!E3ipt_Wdw;H6J@>i3*KMD@+Du!1dd`9e zLPqqpIrc$27T$hnquqMb2gHdtgAOL5 zf$N8$jaZXqf6VxB>zy;-eL3fay`Xb{&>71I`v$%aC%d$~zKrfU_t-IM6~a0rhUm=4 zCeztsr&x 
zq_;~%<@(v%uD)jTyr<3F<)`O8Xtq0POqe_O)S^(u2g}9Jix1HZ?CUK9bZ!92FeQ>L zEgalws_O69q*v&(Bvv2zpr@!d%|+c~gP0Yoeg42nq>g-whAJLQjvNBM^vQJUESyzH ztzalqu#n5%+PB9nO?i8%Gs)jy-#0pWnR(*q^>1EQ*mZf)g4v^AdG%!Fx;fjfxn|2F zYt7tWMcXH*o=!SuM+@7|Zq>GY{aFu)^G}4!|GrXuzwB?A^-Wl}?oM?1d?^=v>%_vF zgqt^HT=PnxLo)H>_F-wUz&; z-AfV1EZA0LQiGqI-P?B5n-A<6Q@K2O+*_~wRO|T-^VeT{&8E2<&D65fbMBA+7X0x3 z$Lw#v%PaqALG;v@`u>v&$)`Xw3>@r25=RZ zWYpqcK6Ma0-(*GWTV;Q7twCU*ps=Y zH9@rhHN5r66K3lCBbdVgNT7dW4jI_nw?*RQeXtNN%B#YdswGgmZKR$oe8vg*a=raH z0jp`cKFt<~j%TNHYJVOgB}D&B*{23Mv%<;gH^+Qwf1DsDeLVHA-$a|oy}EeCgbSwG zKi$13)ok)`hRFK-$|b+dw(aBNmc4n?l(B)gRxGSH?I~J|!S?mAjIP8=w7?EWJ^-yn zv;TUBXW_ihB54&2a3m2s+><08$&^pCd;vw;;Xa>-UL7`L93%OR4Lh@}HP;I{01eoR+hDJzdsus%U#M?%9JY z#7|4!BYo@!OnXM~ISA%VMDqg1FjO);2Cb$MWL#V0sYvUBc0_~Le1jH#`n*x3{t=6+ z73B~0G5Pv5`BsUP*&oS~zrw9@=u>(Q-%SBS#S=`8WHe@}UI#Hmz%(@YQ@sq@3Xvv_ zx%nYPeH-^kB?jPYD++5;yr{>L3%6H)zPcq&eP zwFO$*BwE~AgDOrvDRGGAKx%pd8;CelPz(V|XH=|&ebAb>(BA&F*%tGnh!JuZV3}p2+W@k6 z2t`;0s%sy!q~cGVxMFfC8seUjWce>l%IiUc!R%AH(@|~7;r(W)woK4!io`=*h%Qh#QuK6wMxCp&;$WAy779tm-M2DzJH zz2K*+TU4ePlU!MEk-DiA!Hd-6UvPqWg8`8o`NyU*xfH=LpJE{Dq-Ijl3AD>XG+Lv= z6Q~IyBTJCdgZpyx1ltjTL(@?e{?xcW3#QCRPv@2QUAo}(o(sj^qOl9NnRDCOoq7*n zx*>3Su^sC|Whgz1UwTT!ccm83R}PDgT7IXwvD2YFfzn%}H@8EhXRHXSQLJaMB6OGi zJkOm#Du+cIc_cAdHC4;w^L%I;q{~G5bgWf6+#FtaL8kmurmtfLdGHV_MqN5=C;UWU z3{UF7O61%qiV#xTJm1`f8d~_XtY`XDx89p>qJF*oPd{qj99&0wzP=LiwMh^}l4{Y34Lr5mfOHGg zF`$EfAggn`#Ae-QaaZ91&u_H*z=`nMRe=$z<@Krh8=iq2Z-GSHM01CU>>~&x6OmNI z`U3V+G9nkL-nHLa zXPQr;?lynANrw9j#%WpF++mQbjVADJmq z)aCn@L%bI;Q&>zxE_*~SiNv~c3*eUabnq1?X;Sw{xfu$~fv4`I0~#%<|_E~2z4Flr4MQ!QoO9%fe+Z&BtQ?)X*^*!cC!hYs22Shw4)zYY|h-Ww<^72V6qY}CEQPQ-pN zi-X<3+0LDzZ2l|tk|iqi4>~>#@GZB?^VB2Lb9VV7+aGepiwOj+mv*ec(qV~?%3yHZ zhK;Xs78`B=#Eo(Y5m3@EUv8b-o(W1*5HsI;>)UTTT-40j_~1sjDsph_DtWf3;Ii#DPI^k|nc;Nz&M9e08~an+$q za1D`$R{Ogm@lT-3jJ+;2GaZec9P*CcRAaK%*dsbBO!U}EmIpd?N<$Af{=nE-)$mTW zPH+svo#|ux8>>ZManTvyND7@`hIPO3r2K{vHuJ+*PIpW*dwP6X&#L+}DEcnsfAQ z6CKVOkFKrp;qw7qAZIGB7U!!~QbF#D9A~E0p-a$K`b_Aie*#@*d|wl^vr2O8<{Fc= 
z#@C|r8JINl;vI@==;5LP#&*_J&?TbE zh2}=7w5FMV0$m0U(~>}so#o7w$L8HCF&S>0aCfVQt_R$kIk+cdFYRR9(b)`gsYJ|j z$7gv<*d#dE+y(ppyO06eTqE8F+6rXpq&-c&$*#H3$Br<^T+Mi|y+w13#wF9C3VE;*DZm`B;nBMrz)spU&Md1#mk1F24Cux+LSuFAr~%fC2jfh5fXjz!oHZ8NK3G~wiSuwoGc}iM=w_fu zYlmlNVJ{!%S)wH@X(YxQF5c`urL|0R@U9MBBCdC#l^eDqy+&xva6v&X^Bi%u7TA7Z;@?YRnm$$0z8JG5k6`I$Lry%PfrE z#2KA+uh8e9q}0@kjWg5*L;g!7YF$i5N19N5&~wC0oi{Ij~ZzgZIN3 zc_pNCp@P;UC;CSIz!^DgtVCvlP|$kpvk4luQ$xFmYmw@};&ALhF0q63W))L4y5$c# zj7Hi#+qSS^E5e?XcR`*bBx^*nGB^VXg;t(~gGjwP;d$CA`>c-ki`ZQ zQk&q3^+YryX$fp%wS;HIGj$kN=eiC^O3Wy5KO$U~%OR%r?V1-9I7tXj?p0Q525QUv z^vI)Bg=S62LD@Vx{J=9S-Vx&4?c>G_nNfB3p7A^1w{mk_BF(yCX|u=d&Evwkm(F;0 z&W4@W-E#NPo>!Ij+quS0TF@`WrP=YDF8kz-8*Zh&a1UhF2jLsQCD4)i{C-{BprjA` z!|43ZcFu2#ApXER2j>w(LpM{ehpYR_l@mRWV!LB=%+Bz6v_D$te0O+w~{b-X{?(% zBZEP$*I64!zX(uoxltxAQP5gfYT%4K2>S2YA=Ah;E+c4B6?i|Gj(}>MC z&=34;%=Z1+N3zix@(BHD5dA2^)5w1OeS1;%plnnG4kp;6^b|i(41u*Le0qeAfi5i> zpJL;2(OXFfrH_{@c_Pnxl2p9_4M}ygmQ?DbFVA*l504zKuSZ^Kyo{MTS>Jl(WCvP~ zOp=O5CQ0?ME2;F@Ili$iJ@zUw#?uZ%WUN7Am(P@!7;ZM zkcr5d6>%<_kydGZ6?7{^;acr)g(#dSFjJ1(7n-_K zF4986)<=5}_bFswC|ux|Z-u!kYak6xw`5ON8J)|`4Sd=2ZsE@K)(`Dp?)&(GgWpA) zSYVo2pvvJ(47e{r_6CZh#C324Ctc=b_qDb_L6?^(#Y_44WMAEz^B<&<$ zx|aWPV}rW`HSeT4JCJ#Yr5*83$XI^2-rJgU)ZV@qa`Z6E(X*L{%ZxW`kVs9PkvV?I zBlqEv%b-&@QbTK#d5+YOBjkZH{Bck)jG^NHpWJG;wtf>U5Yi3 zbALjOVzv`!8+tw(L#6Xz#o#lb6$3M4K4r$w?rvx8j?O3LPhl-Rd=a28cc2@=cNA}M z+Nhj!>^W?Td&B1fM*1z*#yX20KI+qmVL9j6GC&Uweg|l2DXfh(#h$9ZR$;kTv3-CZ z$r4qLT^#G;N>t?&DGxfBL;!ttaFZUpIMy8TRcB?D6{4Yk0Q4m+Q{~tzVcpH zV;=x?%i!Pi*ehdKVHK*6?LZrK?S5iHupH1B8#t`_DjxZ5r|cqe2e052^66e@4~gkv zlD`sSa0m$p2H}o|k*=WHUSx$Xev*Ys7fHkr$4w^%LT-P{rY~)Cs z7`tOo+~~L|ae=Yh_s1O}fFb}y_R}|t;s&8}Yh*ADaU7spfAGw~SS4PF?g6fZ!3xAK zm=ajqRpR{U4mk(xW%6M|ZV)}iFRrhMB`U40@?rB{W=dS`K=%hls_(oE$6^FMgKBD6?(g_RI==vm3K5suRI0^36CxMb!NN!{52hdc6+>SF5?VCqQEYrGx~V;?JcZ>-?_oayTZ&|% zL;t#6>tD(~FK^_?b?LE8(Ic-Eb;Jc{jI7QKIWnCYG%}qTPq=4>9$V>la_mAq_Bz}k zcBX$bH|~Vayp`jV&qt2G&iDpv;door_!Uj#PjK-XNr&~|_Pxw0=*h*LGS%f2{swVM 
zedXf>za3ysflq;;jU6th@Civ)Ns7ZM>#_6pGN)i{=7=H85!cx9W+`(DM&X_J8$Yj( z%W`A6%PAU{M$XAT1h~ATaRH|s(ww3(Npnv2zL0fe*~4Av%yORVh1NXS(wQaffxu24 z8M!!26O!o8e&D=Enq^C8met2DGfHb3+ripDHf%3!wKGOmSld@vWBF77*6fp9&XRkO zeQ97P(+~LZ^8vBnv6g+KF)uviQHi|*&j&?+^}^$M0qY0VF<)+M2Jbr9XsVq3CzvlU z*7VjiQC{EkfC-|K;KTFf3X9trllW-Nygt|s&-=;e#vXot9%kx+hxheSo@u;&g}K;; zHzpd#_&FRYRw;8W#>WaRH&(iwqA^c&Fpo!c@`unwNJ+ev>^rd_I2+kFk>vU$B`)Gkt350C`T?HsAZ7yw3Vkhuo7qTR>_f98pl8lM*~qOR(T!Y$RvYkWRgL1 zTp6Ut&hU**F+w@^YSG7y5vs8(>L5m_ymHe2NIrQv{%YfE*C(&WMiO6@X z6W!fdh@uR|SrG7*zC66cs?9!V`HpqQQSg%^=QQQv<3z0zdEEr^Q}44jLOW_=TqjR* z&iRFGJJT}bj`3ufA?35ldl4(0nj%)S(U8tk?fz9Xh2m_2QY z^0F1xIc9%hzO;4a>Mii7jo9&isgfKq~96g;|wry)!;3MEZ3!b*GQ2mgN zj%AFl&(FraWtEAk>KYhKo0w#Y@>-GeFz8yEj2_9rw3_;1zs0&vziXEaPi<6x_mJGSzjgghX&!S^++V7L# z$OXT58nPH}WJmgGf3w=XJTk}YfIsjUj@_%H>sq5q0aq&?#o1=v%k%8Q-zY)fT#bvi zr<4u`$rN~o=ZTmE#xD^$?mYFbsqLIOJa*`J>=-%rDlw{tu@UQMnZ{$YS5A(-O2pML zcB(_e>Ub|V*157(>oI+ZeR=Z0Xau=uWQd}sjcFvSH^w_ zO$rIjafSm;ol%|jQA}Ua&7sKn*lJ{LOa(Nmd6|CSim3{k{e#3+t(_CTf9zf-ZuGDN zO((1~w$_uJ6F&A5Rua9S%v=)|1oiKa<;Nz#N{(dLg<5~4?yJs4D1o-BVdN(W>6v8)*S9g)U5CX$_nuOFcwqwzzwLgJ@mB7>mJIxUiTRnG#) zLs~LJjb!Yy4AQ?6wfKUbt}^m zbO(QRhvuwj%e?EQOm*lI>>v%z`+}f>53i&j8g#r!!|IVCSzK7A4dHB#0ylPN zs7H>ISjV0LbZ5evv7jjMmF|ja=mg-cqQIaDdMANj#3xMewZ;N;VesmvOmBx{P~&Ue z#Mpey(A|xnTx|H{BWPtgREI7R_tya3*r7^w=rZ*B(R9&cH*wZSLKEH+leLgX8gC7q z?ZunvBB6&H&$)Eb(9J*>6$>7RnbzW&{>_=`eis|o!)d1J4(`>VDdMehSBcKb$7A@R z!H0KfSUs}Rd>NK8mv~SVTXUj@t_RFjRcInsv>{`DkTEY21uiapHWTLRZ07wLeB0U4 z6s^rw8m*E5IDRiG{F*gkI2kt!6cqNt!jek8jAPU9o^;mww(T6=F*37~_N ziTl6Ds<^@Lg|05XB>I-8*{^1y*>}pmzh|6hScmJ+q?d}$3<0OL=bif{uUQ$+97hss z8KvYO!_qnkf+g#6sml#6w6hWt8Y|&IuM%`FR3E#A!)ZgJr>*vMHWM`alZxzRaxOS4 z=#2Y<-&uyd6g`SQZJiS|wg6|5JXz>O3>#}>^<4?A&$+OCXKW*&dlL^jCs%Q-A^Zql z?>=#YiZi?g=p3HC9D8M~mpglX&V`-Xhv4d-I4UWYe62;F85k8%{Fgp87~ z9z*M9PA)2TLzwESfE{3Y4N~m&4nNJY9+A)4$1)jrzk0pqpp({MJR`toeS5Ufw~F$_ zFA?+Ixv+&r@=R%}v+V%Yf8c$9W;{mPYibx}xv|w<0ll}C^}?q$udY0Qi;aie)m7Z8 zeZ3Nzcw0iRMDHrSYZSE3;9%%bBMrTd*T(TtXlQ5|`hhJ6k@+ 
z^1gZuy~4^yISbNTTS<&Z7j(jmIl2z%X{5hU#-7&Fv|g0y>@unAkPMKp#kXq-pm5fH zI|)lXBK0Ej@N4W8w9Xlp8W-#q2YNO9o7!%1p}`}n;~jk?gSkqMPab})cWKNKwUasf zIKRf(G{HaC)NB!sDJl(zG_Vv-FBq% zeBJcUAJ%j8ADT8FHx>W$-l{fd^}kv=zV5jG-RA>Yr%z0LV9I@g+9U~Kfdzl&bw=(o zOSeugcWOzU<#3!|zXW$)sJT;150&xGu&lk3E($B^E=@xXn*!Zc`EroW6gbM59B}rD zM|aiVP_l`kS%OPw%7W6k=<_Jm;V%E!*b!H2e$lW>UNrjYq3I#=!Ymq-`aUTm3TjPA?9#Ts2iT2<2;{%8lG6}D@2enIwW90 z29FcV=W*~Mn9a{GL|QnFKY*Wqi}xPk=T#1TTYkPIb^>wHaTxzKrq;GttJn{y4Pu2! z`|K!B6wXIDSyi?u&kj+85X}zjzX9XV;?KGIGjfLTcgz!iXHOM>?xOzAk;xFv7k___ z=E;A@Jmq*Cl@8$s2^pmR~+ab?%2E^30h?fzXmSo z6Kdgb0eH35S88O<1IlkZA9(!A9P>6t4jJ2WG{3MGL*q6Azo2Pzqm=SICklWD0qAJe z5$glb1?bJuPsGJS6#c~Kr_jgYC;t0*{+xqdT*aSn0^bYcCi`-0oYf1O{V2(sd_Lk` z%&6+c{DCQ_#H5m$k$^=P$41i9xzkK2Ko@sNQak)TI5GiX?=aef;TtK4}hc%69KG5SUoAvTSl@8D;X+uq?#*G2|4Z&L-BW#n}`?5tR&?55-h59x6m&Xw=Z%a3AZm@|J$b`{qVx?A9!W` z;zyR3iV^P&v48k_p?!MZE3d6yv}&RGaL@gVHh(&9-^A&q_m95uzT59xGrHohcfUDh z`r=84O0X}g$;JOcE+Ka)u#Y7ha)&Ttgw7@UY)Z+Cl?evJZLp4zv{;!Eai3_T){!+? zqO+>xy8<%nZO*rnn*U1ULpR@wbs8derW29;cc%na)@sAJ;a|Xwxr9H1OYoWg9^Q{(>rB>Glui<^Jvyd_VeZ`FYHLq#4g zZZG?F7B#MVdsnlaZ_yfZptm6M+%v$rU_5NRBn$fzDDr zA{6IDn>dSu>2?FZZyvrw`FkPCv4af8+56vBJ;h!|kGvasge&#Bb`5@y9rUxr2R(hi zKLej+2On8ELot2O12TVQUtNEqBAMDkfo|f9zjilUyRkz)#c6M<8p!JL*Y08tIK`oh z&a+8!@Tfj^iMi7qTgz7!;j2D&iCNzrTc1^GjyyI;U}fBRIOnuE_O!;kKBRp!=VasY znG0m>csN(%Ip5H@d(T4Fw>V~tw?S@o=Nf8fwn{cs)N=ec{`fIL&vo%vbgxKtXfw>p zG0A6WdsIL{A!3JjA22{Nfow(vT!J7rQIM{v=xMRA)w5+t37#LSN-TZ*ga&H9S>y z($F5kQeRplVdH5rh2MFXJ%TEOz>Sbun90*rucjn1pT5|yOx=G~4PL==n(~pQobE2} za?fhL7OLLWpFvJ*vB$++$w6JGDWxXm?MSKF-T1`a?HW44SvhEn!5o&+l{}9@_F2J8 zSlYb9pX&hAk3-+Uim%7>+H755oK!j&^MONS=&v#=cM9J~2)=@Efa5`l&4Kr02h}(D z#3PMY=dXZv{p~(+-5{Acvkx>KU_Hw39&h)FZ^JqYY!rGPoXXGp+E-b_`CRyjo|{*R zrLpHRPCxtVz-H?T%>0=1{E}EDp5Fs_PX#~VzhwBE?W??Xv3PzpKj)poG{H{6_s@l{ zeS~T9B7d%?V?f6?!tMFg{g7w8QvB|ox;g{1o^!@$f)6dm{wD`|kb9~ro-bF>)!)^S zw2?67L$#3KuCaPA$$6Ko3nbYIZpB*ET)o%Yt9|_2E(@MM0`dfBDI~}Lw^hwDPl9Rt z0Mn8)A^5aGCIo1WJq^A0|4hvf_E9h`^mlM3GCM$x;l@Fcp*WK@aN=ceP5rIV1;4e5 
zAlH^LcKkW`I_QbdJi<)q(vH;CIy@}U@^c(HEqR0b8%W+5E~41!&NmcqR5XHQ&vm{b z`Gaz_C4USTL$QCo-%u8#`UdIp!@~{n4eSTT>N(V3c+UcwR~x)T)t2|e&Fhg+)`zz~PX(TP4%z702?cIl(#8!~?MH&~DTuX-uST;n}> zhqMF#eI4^KtiegraYPn#iq=L&UH;5}51ZgU{GHYay5EIrjWi#4c~9c-2EKt2cvWX1 z!t1Q6wne?X_5^S^5r$VDS&8x;iQshS8*JrQ=WlRU;3)G!j8|&-B+Un!lBgikJto+i zVLp)7OtG=W{_iTPysPpHb8|scQ^oGB1CMLZhY^*cZ;#_g)sdb<_ z$iz%@cyl47*PCp$f9kA2y!#H;%4h|=dfYJR<}b2`;t#n3L-k%~A35zrZ`C<$uuz+D z7-!{IIHvN9b1aj}i#96eQKhgP(7bi#%__G@Zvv(R`=oU3NzTI01{zaz%Cc7+hlleO|@ z;T?Y{cs)9D>B`$Wbdzs*S-;_YRO0KMtFceQ8`9-BVsC+7--RC5Z#aKsxW+e}=X=9< z`VAML>aT(NhMq%hyrGHn4VuG^p=b3QF1YI}1+VAO3Ug4sVdNY>)NkmBIfT?Vd~Z;l z)8#6x)oq&RoJ~XJdZscl8ZDK6O%2=cA;T`;PX1 z`VJ=-O=?0R_Z{v3^c@`)#fWp`C{n?@!o1Mjf{D-Csny_qVjfgP+Y%zcZsi!&PsU^H|D#`9jzb6BfdnJ2XLuGEZGL8kqaWtx1$`8R9M^Swdoq%?;qVZ@4^ zH(X#mA@XW`gCjMmtFzUMWtx1$1p~B9({DfriSiZ>Z_^ukgul{n=!iL_)%XVL%1PWH z*Oz3Pd_%{ZwM^4*KqoML?lHG1n;tyMUD%jM!F6YFk*XeMUSyeuw~$O5C}o<*f1=-} zIv|mi_j{Q5^c=}~fuSy+YEOpccrum%<`b4R^Bm~jxRJ;?AsL#q2Fu~5)%GSanfDCF zracoFVW2M8c*x%DsmrD3SMXjMkNq6a_NlbGw*;jI+uvSgwc|YootXC?GzyF(EBRgk z?trIV6HF18DQvW6d#-~!Yqfm3OU-%{qWAiAXDLCrw{mc&u`djhj>ltTCwwW|s5RDO zHv^xxbk>;0ui~9Ru)gOc+!hiiBz$TwUtOz#BW8Mkevz6s(O}GG|X}yxaN4rjthEy~%pAhF#Fs*##1Bvg_p7 zYmLv{YIZ$#OD9)^cL1L>G&W9}5&ERj)K*;~Zo^(*7NRO#j7)nv7|(mUzrEP1vgn+} z7`)e)aL2g0kO8PP!}ANA=kQTL2F$|wdl()vu8U)M_*n5=R=<(v4|tLd*0(~Rk=ja0 zcc>z(>xe2hI?xZfR8gLT3RTO4u|thmIzQ#C{rj+h9Ex+~l1@qNM9$2^*ui9`3Q13j z+95wcLF?RshR$kMAG^D_)E!$_?KS+9vAY}ZyJPeEkW6df8=GXk#CsDilh-!x5m4%!zq#`PT9$va-qX1JNbDX8#Q{&DFd8w8peiz zQ`qMkCQbo7bINlao|dKIL$Vw)|^5 zRFQrKlGijgsFI`i55v-0raL1Kvfr^bBgDbom5e<2YF3&(fzKtz^4L(^YaYpO?9|?# zhaKgq0yeKQyD~k_vKKq__?74ZAN1?|d?Dc9WctCAK+jEB?VuBGhYP%Dbpm#OIPkyl z^9sOIWH^^%zy$A96F&v`o=k7V{`fia%`i?^`?}zZLDqt6 zPfF%K%}Mzc32qhvU2K=cZQjrg-_zWU+BSRA3Xnn244r)}Bn*VnTb(Y z@mG~})$vbp@J}`QU$}HYc<@JF1g;H4`}hBw_*2h+&IB+L(UXG_R(pg$uo6hrPmE)l zC+0`)O{|uv&%K$-(I6X_oz!wC1WG&2>t(MfbBg`XSGH~qOo?7B63xxgYt7B2=3Qm> zdDt9~LO)`@do@j!v!m`s=EXKAJ2#ePquN 
zTR#%N*a_AvrR&$1M&A*e?eyp~z+UAB!Unk^QY)EWS;Jx|@277Pk&vWsDBpWi0UD`C zYB_(Pt>qGzhziOE)_uC`qbKdF4vTxmqz~<@o-*U3znSr+fs|+q^P|(pfs+%n15QKF z#7Qxzij%uIsWdo_-#d{JbJx-Z(R z-%PMahy|sAt*1W>5y!^;lKgpSEEf2kKR06d znfN>1gdQ?7`0uix+6_3%k>vqxqC~uec%GxLC_brBUC8}i;0U@8X6QicyoNkS``iGD z>Fu7>UEJ>GOlZ%K^2MuD-|0ex%!7S3jIH|ZO0Q`X;LRRQW3zP^db>G(E4DpIheKC@ z@)XAoy?{LBoCyRkAZEJRN{dC+HsBdWe(RCl4 zW2M)pC3&v+9le}ynC?5YCk9=vRc=kJ7T~_)cSvu{X7h9R9n9M_4|K=J{XhH;n5+XXXgA3(i+i`T7ARunvhOf;9xahv{T>EHXV7Rl)tM0T+)u`K{h+l zsjXe!-a20#7=0X>4!d`(M&ht(3#sryA6Ww?2&T^PE!fm{ptm6BaQAOVCq>`x6H9%T+hj4fZK-mpaVp8{#>FhN{nw33R2+8^c34o9XmLu-j+^rMJ=EX;OwuWnj+&SK*J_(r0XuUOdi5r44QQl=3nR)yRra%r>nkp1{41O;C%@=}W zeq{Jg4#ym0__MKx4l;ay=lM1K{CbRkFT;0n#+SRkAI5)%;Ria;ZzXuUEj&L(!9$P{ z1rP^eF(!)I0Z;0H3X~!}iCpfQMkN-(kO^g1R$5w>R5I@q8-apA4cx zR(#<+-^=jRL6LdE#(4g@^ZXtA2;l#UYPO*a|1k~}8ox2aH^4ak`1wHxekebOCk@gJ zx4nvgI`D<|VTC7ZAwz^GKFv%(b7Wehg8isD!r~F#KWu%oU9M`2+r( zh

S&jh1n&JeeMOkU4{=4@cYr6k z(ax=L|s)4kSST31>;ptckO_^V^T9xBEDa;HjTYh`K}oyXu_h<1|<>J?*}M z_pN2n9G6MC$WAI?$U?FnI``L8W)^o)BV!P5DDM^Dp<4A|-bV83(N%hr%JcH;Mvmvf zabV=(>{)?-Z6BOhoQIA_ZSX+;dyKbxbtFaJ@`~Go>%13f#KV-=OQ~q$bHK<788q}q z!3^#&4)djubc-6QXmCR$?(J^ht_2LE4uQDY_m6s@>c_n*5h&F(Lr&e$^R`oV+yy%Jm$1XX53TRMp- zTrza)av|@CQ{}l#7(OW#Vfbhu`!gXB&m>3y305S)ZQ{N3ES_8=bjPfxUdXZH$+=XJ znuGretdmtM`KG~}D_7Zz#W1=F5j{YnX5e&fuoLxoru79!-6YqGjZC#(KD2lj(0T>< z7E^c>)5x#ry~UT%Y3P!fAgSc!zFxHNxAe-i zd6O4EEtZ$goR=Xcu74xL9xPTh7L7l@Wz8S64(@z#+}ke>e{uilY2e1{a?iA{3XY(v zaT8V~dvssmY-72Hr~+H=p<-k>>KbgdhjhNA+}(gSc0AY~E6v>(a1wqdzTxNbfcu$rJo9co2}{f}e$KawlH4&%uLYUbl(0V=f z@6dW%%1)O&4%M#1$BYxNyI95W*qv^zGoF`VztTAToaMNLA1+4lbEX{2JDeCajdfwl`a34YDYx57q$G&3m&&*5FJa`QdcW z!F&sx@f-8=+4c=A_4xeY@xM0W08bJ0!K`u2V|b3BgKv#sk})5;*R`m2z$g4K31=L7 zO2NXX&$sCRKWS8oj$*6X&3xbdK9Cvc8W>}?c*23e^;wbt+BN>B-BZ`C1Fy+w%S(hvucm84bN}l zwTV?pSxM89wk91ZMWmB}6r~Cxy(ghd5s}`J zB1pHOVn750q$*87K=dsj5}JUtP(lfWl0blvKmy6V7jlz((@-hd?{oGh5ES3<_viii z{qf7@wX-`rJ3Djc%$YN1_UzSO9RK36Qa+^`mzq=ReCZOUyOds7`iIiVWonffQs!{k zSId4=_V;on%N;7`EdO-{y@Fqb+7%oX)ru`EeqQmie>eXL{=fO#0;~aT0;U8U2{;>& z9AH!`UFqXW36(2UUR~Ms(%>roRTfvdQ}wN?d#cr}How}B)$UhsUH!A_7hkUV@{U(J zzw+rTNi{mx*juwy&9OC4*37Qex7OTRKiB%LmaVq6cFWpRYyVa|wN8yX?dy!F^In|| zb)xE|)h$r>jk?n_ll){nqvOzE=CSA+KF-(5At64eq`E z?(3^w&uloX;hsi*jm9=Q(b&6j$Hu#x6m9Zh6L-_VrU#n6*lcO@0?ns4k8a`DVswjR zEnjW1SoVK3^`Ubum7#Gwt=;NTM;Nak+!4KN? 
zXt%oE?e;C(zt?_shpHX=cUaruMo0gSA9lRascNTHo&7t1(mA`!2VK(M81P1TSEcLl zuHoHkciY+hjqa{0_*Mwg8-mdfZ)VFW@&ashY zM}~}CFmmOnVx#7bT0Uyys9#3?Hu|N}UyM!|?HZ$v@f}lk%;+(Z@4of!sj;t*T|M^E zxOd0Jyf^s0JL6l8UpC%0q0xlx6Anx`HR1lh*>WaYC;Ck+GqK*pUK6KHTsv{!#1j)Q zPV%1AZqnvSk&~`Zx;weh58-`pf$*-|zH6jSm)mSop)AKl1x% z{zu8vx__*F%-hPJ4E*HSCvl(L|J3i(S3X_y>G4k=O)oXQ%k+fLDt)&7Gj~XlkeVSQ zLMDZ*2{{sScE)Qn=FBWH^YfYaKR^EY@h{&0;`Z!%vj@)pX^wTyr*nRp`@-Ch=7xRQ z;>)F9o|so`UZ;5<&pR={-2Be-=gr@HF&%*nQ zdM#SGSY14F@yW&RB~_O!ToSgl$kJ9zLzdc?wOqF0Yw`7@uN~hE{AR~D>B~DV-?*aW ziX|%|SC(J-&dQ%x+E=~2YV@jIs|&9#y?W&8tKYu$?Y3{-Yu;aTZEfSVE7sLq*L>Z| zb;;k=|8DAcw)Gv?hi(YiuxDe_jZxnReZS}XKQ_I!Y5S(QANu^T?}rONHu!PczpDJ} z%YUW*)cmK$<*y7`UG-)||frQ??2TXt=&zIDRZeLt7_dG^n{wyE2OZhL>*ylrc? zg>E~x?fSOVUpoA9V7vGBQQI$Ve-zp+bYrM(NAn%ScU;+#vUB{-sGT{xe0TZps=q66 z*IT46}wODsj}z8J=cHj`Rls9`rdc;M(k_5 zuhTx;{`&jB+wb~q)Ne5dsydYke0t>TBU_Gy9l3ra z<48_efv|wEMqzJ+jSTxVY+2Zru;XFZ!`w%`j+Q@K|7hol_b0KH&J2f~o9m!JIU(hfZBSm2&F$)7t4$r@Nf)b9%(-Pfjm8{oU!Ur;nY!b~^Qpex}TsSI@LRGvv&) zGfU3=bY}ONurpWBq(9cjt z2A}PJcJkSIXTLvt@a&bdY3Dkh+jTDDT;jPuB8x;;i)<13R^-^o&m&hyhDM%>ycwB& zzR>wP=ifd*=KPHFE6@LO{>1tC^Y<^P7m8e{a-rFU9v8-32)VHQ!nO-1FT`KSx>)dH zrHhR(_P99uV#vki7q?$Lc`@!{=B2kUy?1HWrEf3oymb1~%}ZHP1*6`NS`f7{YJb$l zsJN)i=mOCJ(T$?pM)!%H5dBH?(&%;3yP}Uo$3)+b{`0cmkE`OU_f@~ErLOv4t$wxc)ecvOUj69m;;a9I3=!BT%EXvaV_II#SM)c6E`DnW!$E?(701^H{-J73&od+FBjh+{*Cy) z@$bY>ik}`oFMfIay7(XCe~Ax|kBd)=e|WRl%^EiYZ+5=f^X9uZXWsnw=Jz)b-n?=% z?UvWAinm_B)%n(lThni?xD|S9->tA)*KcJc6ild`&?KQ-!svt<39A!!C7er0N>man zBvwtVm)Iilt;Dg3UnH(g{5A1>qCL^LUFvq-+nsKYxIO*$irb;L&)mL!`{A7ucWT{f ze`na8@pnGHv-Hm9J7ITX?zob?lKhhzC3R04oisCPb<&=s3rV)5$H_j)FC+&f*Gdjd z?w33zc|r0I$%m3-lGAKnwu-hUww|_kZ8L4FY@xPOwwtzWdqMk)_R97;_NMk8_R;nb z`wIIn_HcWgJ=0OZ;qR#FXygcVbaRY!Om}?a*y;##oO4`rBstuUM|ZvNmbmMGxBA_= zcOCa?+nt0C~aWcn6&rPW~MDnTb;HwZExD~vtG8>KYl-V8*KSvs>zwPFE6L?{d%G*T8@OA$ySV$fN4O`tKXrfU{>Ht* zz0JMf9qzv5j&~=!Q{CC_$LW^z!s#!hmrt*fUMsypdh_(4^e*Y|q)$&@p1v)8Px_(s znDm?J*%<{hif5F~sGLzFqfJJ~j2;<%Glpi2$(WQeEn{Y8;mp37`!kPbMrI~uKFCtD 
zie#0^s+v_Vt3_7FtX^3|v);>^mNh$TY1X=|Em^;!9ZC&Rtz2yL&7ax~@@;y53S-_d&5V|Fi{SxaWv09(sV6?Rs^Qn5(CX5A^BcJxiPz zt^X-TC?mv3^`Q7dJuRwglSDcFOMW?JsVJ{D5LMK&VzJg)6eIm)umubN13?3@8q5M+ zzyvT{af{i?4)L;HRJ7NJiWT%H7ezU*(xN%{ zG}g}xUp+$fQY(t3T6mNlw>N8reFMO30u?!o0U&=Sa+ClWwmWgHB zQSpxcp7_k#k@ziQq54?N)Puzk)g{`17V1p#u|8L<*6xb7YDwOpeki68KTZ1{go$@( z&$p}%L^o?=F`lvndQCGj^gz)^(hqy_kNmdFeCtv1h8`-W>q|sC^6ICr5#Q*i#30LR zQC@irP^<`1XGGA;Y%@VH-Vw6{)7@|B9>n(Gj*?uF*GF^<&mr|d_&?8gK zRW)O`-cStHPKeFgSK=#et5~Rw5i=xB)otP<^%YT1J1IU^ABll#Uw$)8Jo=XqkH-_T z%Jg_3VKu@=YGHVwpqR+4co}8cVc8@qLc@`k7NQ?91b91vYBUy8n#9%7>Y zg&58n&3auGwJGmly_*=OSD~#byS`9NQ&))N(6x%@q|7JaO&Q;XW0|(5=&lbCi!9%Z zx1qrx>k2VV-ylBH+ZeId_M)T35U*HM& z{~^$Et=12^Ulz5k{^AAQPxRK;i8Xo;uwK-(xWroQ9&2Qrc1={bM4I(oqxTUbtsjat z)^*h9cTvImI_Y|YE@HTK4&iW7M;~Q4I3H;}BdS@G$m2`nvCPNIoBL{r8G0`x&Z`Dx z`9rMIqeON1&DX1-IAbj+4q7jXWtK#-+(#F`dR>AyDvNH~4bfS)hd!USFCa?lny77_ z!%JQYdA=m3=jCmwC#LG{DC=V}h4vi@Cep_5fK5U?!1Dc|rqxI6pii%n<-pqg6S8N4 zK1^)X7E`AiT(iJ`8%0^`5b*--vs2$E*6>#9_sE*n)&(Na@;U9n+KzsmI$aPy(GE+z zUxnUTVu>XU-iQ|CpmSrq+g|pnDPFVmho26L<@$1XYCPc>@j2m_);gl4e#p3K=G|AU zw&XxN7qs6=8Q>S%Z?o>?8C{V}EyP;c5A>hJC@`9}o+j{nQ%h~0(+d8&FW$C(E&4$3 z;nrT_bxRHLD&Jn}q4QhIUPncsb*gCTg&eeQ5Q|CszFuC8uxt~RE&d`H-dN7>EY;C7 zL`}_KjMqyD;r%jp)!b0s&u~xxdETG6&p58j@kStp{UwjYS6@SJ14}D-v?cQU2T@pA zB?g1K$_nukVORCC7^U72&D5jfCFEGJzDT^S?iBOrzh7JW^851BMGvnnB2axtj0Js= zF-?&*uWN(pAILH5TcVnWI2a6aV7DD6gB8dL*lO8Twl76UK$OzSE(pTkJ zQdG3i>xeIOZ&5?-EcU32#CxiSEYC!SzsGaui)Pl#9_VR=-wNIjfPVEwKko*jKjoKl zbQvCXg*JtTh`@{XyaXpytXp#%CQPw0sTNP&;)b^ z9kk_Q5#!=I%|_ZLqJn;047YSfcD_X4E-sc@niKkprFsb0sV}m89`e07AA;{k!m5ZPAfueXcA)Zf>Fs`^8MrjZ%Li%pCI)tS)Y7R9vCOlGnNxx00-bFkA5X}to%T?lKNF1 z&}T|Ym^viS`&%gM#q*?Y`P>kF(^8r?l698l`46GgNo70yV+f6;&IrBsQjfKS=LmNa zItinoXDKD!cu2Szono&Ri-$G3t<;myRX0l=lYY8ENk`8?*Cl*ghUy%~LK#Lf&dFy$ zpTCCs7~`O97fG+bhK!{@(Z3ethHvJE!%UqSz4bqaCJ&+8<`2V#k8F3@7XNMNg-$N@ zV#y=2-ArCVH<$Xc)Y0>YQcsn7saY4PlZF_FO-LC_-~UG-AEX?~55-{|M&2$YJOy@{ za#!}7ze39J{E#v}|Ffj*$q(dF{^#T!OPT%*WZ!fc-+(P3g0fh_T#a$S91nJjne=IM 
ze2}48J~=i>^BRHmo2-EhVW{0%h!g@ zQqL%1d56Bg!uZy_j&5T1586rc9z48P^6X#dW0FT?yK=2Dd0jEvSnA?Z|3+sr^=@=) zb50@qww${#U!v__=2^*7?>759p{aMvK8emRc~m~r%u~(>85`vDrEdMCd@>#BDc_D~ z%EkP^oGVH>K>1|ZO!*}Hyws^>IdbQKrc9wMQXXhh|E7P?55JOf$(#qGf6Fv^y!_WN zkGIYFvz%8~V17;imU@;flbk>1$*H`unzDy-m~$8A8y+3nc<9|n^z-RJpO$l5^#43L zX3~^-jiiSca}7B^Fxx}UrSr+bygrmC*G&DC`IwxO<=yurZ)hfIA!%oZd9ox=ug?pa zr^va4q`N75{~9vCG3Ox6Ys|R^bLzbMK0h=fo*(}ldH$FB<=vN8zkd#S3!u62+do~G zv}64FmhgMp`isBH&wS_Y^u@-BeCN-~+r0Vje+vI9Hy0yEnJ?$foBt`yg%cEEp?AEKX=Ti zq5qf@Bke!3|C#n5$uH6lB*WbCRQ7W@Hs*!7W8>^6{VDI+QZFgw$&2U6F>mIRu`PG( zlFvndE+=J9UYOU1^Tr^{$((wo+^=K|vs5%f)R&A|dN{Idq)}FXWTctb(A}iGH)W_P z-_hsM?WLaoL~cv{U&`d%xq>q#GB+>_6y zUwk1al3rtTc;UjQ!-KLp9ewJ0rj~LZ<@sYZThnJNi-m=i?bR06#+8bmP`f1Fk>*DU z&tC$rRo#StKhFGROuMdK+lf}JqVsjY93kHETCH+H`A%Nbn^;0Q@t3|#@|hMtp8`6W zsj6nRcv)3V(=D9nyvLgRr>Y$2-d6rB7OPdYSosEc-kCRXs;=s~W~OFOa@AX&Xl$&f zbBV0U!)j5j^4#J@Vol>B&(w7~v2;xhKM#$9sFpf8gy^ zz{jGSk1)?T@;uv1#>q$dm`7eb%*X1(O(sobk>nvy2Hqqgt&B4deZ0(@C=Xwv^Rf77 zl2WRoKzl2{$t_y|qF6NDs`}6hw1byd0WU9EI_}YZWOd9Z@pRcFB-Nl4CAWGBA8zq9 zfq8|Utd#l3y^z`}OAYa5>21Cq} za+7W2xvH6jmicp!>O~tp`IA+XT=V3Q`zRA_tI8+IJIz0NT_(*tmlyN>nYlbU*K?ak zmdD)kC4LfTW+(4cd7h+|>l&y+>VQe}e@s<;(HEvq(E`=|%iQ|cKt zUd_}LdXJCht5w$KX$!Qa+6FCD+pnGBU8E>2Mz5mR(S!7^`cQqm{*gXg|4!em@8BB& z=k=?4qMprnq5Ukyc@eHi5KK=#$i~7IdU%|hUe>MNQ{`LKv_&4(p z^q=DYng1sLE&f0I@8JEQLjjh6;sGTCDg{&xs2wn{l3K}H$+uFmN@Xfls8pj;`$~f= zji?flrrtmPP|q<8`Fn-(PuAL>T5q7%zlbn#N?a6nz6fY2eo8T=s#0BfRq3e=Rz~I4 zdcCrpT0c-r<<|NnwZ2BJQI>LRJ&Rf|)Rt-AYdf^xvl^otB;}fMOgU#9Wym}N@Cr~^ zHB_(~DLcUy@Uv0U@G>kpf8?YnX_8nuBXRSeJe`adhI4JDU^x+}V)WEioXGcO4TlKx%?5Avs}P!jgnV3A1i}o$ygY zms?wI{(NiQt+lt7-&%HS&aHQE^}N;nMr!fB1%9_4P>(C8On#Szd-|8eAvIF)yHo1r zgX{Z?9Xe+M8 znR<-$;D7m3KT|{08R|^+b9ENh&5zYZ>SA??x>Q}Jel4(Ds^6%~)fMVWb(NSUzED@I z->PfWwdy+cJ26|#QMagD)t}XE>M!bcF;{%4hN?T%o$4+X%Y!(^Qx07K_AUHC@e6Gu13LTfMLTE|!R;>L2Q#>I3zm`bd4u zH^IKv3TcHkKdq=%Oe?N_s!mtGP}gfUw3=Eit+rM`E2tIGeCe6nwb>$6o1@Le&b@S 
zv>&t|ML1U4ll1k|yk{OE&Wdx|zeJ=suO(^QM3nZ6h}O0vwRUJb#bt3t#Av&cRLUU{B`Um=lT85U1^qr%BrgT=i=%4D-wJa@LyU&+wx+>iizF>^B z@2R|njGm_SQr=d2>k3i~l~t+$x>f0;^wmGsKhewT<&=I(e`SC&5UKX1GDt74&r`-L z6OeH<^Zt)BI>{E;{LT8odHT!^bZczss#7wUAd3{JpXwehR^d4_8&E5c&J{*47o63UY|Ptq2kTn zquKZD9T3{8PuVAjBl`4dQY%!K*>DG`=k+0jF}WEKno)Hg*9x`N?BpM+RqfWhd+*SY zz_OvO0{fH=2=EUI4eQoBG%T=eK%YLfLak5ANLfD}UCL8hubQFOS89cNdvbWQcWA4! zp`y>cc{0B@dshw!4VgEu>^y3acOLfa`95Boe;bYpTJt7J~cbN**l0L22jKTHAAZh)e0?GGxTMGLN#|(Q|9{5d$afcRzerU_IZoB zJ$vsL)iuY2K4n8IQ&|7G`~09=Zk()T;hLeX=I-+seS7bCSp=5d&#MWJK;jG42*rnZ zr{bs9P<9qo*zYacq;8e6e!`n2V5NQ2+5sgv+^hC(->ABIr1fa~T9uMAP9NB%S&h>2 z$ntv4N~L|}k#*vL;Cf}{k=K$B`ZuX8kG!Xk?AEx9Jo4!o^mTRnT#u3!mp5zRUQ>-Rn`22+lh2TSLP{ZH`(KGnTN≤FHXCuU9Qce=@vmZ{%9^5??OaVQJ8!4u8-w;yt zGSf}w{_*50pIMtH))x&$Q}axgpy4z3*ZNBi`SbQCA6bGSU<$bp;prp9IP*#G5;tDF z!#;m)YCm)K?3H1p9|vVdnN*ofj-$y*meTV~S;HwjPnNth*M{+|_sp6PC(Trj<o-- z)0y^aD}vzUH`&)RDUAL@>zjYZ7)trPKdc188#9mmW|KTF;z^`d=eQ4^87+tnf_oLX z<|Fnq*c5&#`g8WP*%a|5n}RN~oc&6Al_EB=Dg5gAHlDtNO@V6KmJIQvlwAHGw@pm{3FdkUYpWt5%3{x`IDO*x<(6{-@Y*oe8O++&}qWU|jz zve`dUa)g5K2xZ3dM444D)r-B4T9AEFwJ7_NYAN<*_=OQgtxh{2=T%Cs)>3P+udCK) zKS&+Mei~Y=q9$weg`$0hE~RKcqAMxrNr%`UVa-y}Zfej_OVI4>?`kRR)3glsSy&$x z{WI3P6{)GP|3arV(H_W6|3<&aK1xq#pQ%4!|A@DM6pS~8p}FJ}f&3AoCdWl+>7T@A zwk>R1*?ty3OPVuptHyN;e6tB&*d}K;oD06T`4ar82L^ZUDQXYJl?&T)Ka%M?X-b3(XHe;A)T=VtjsV@my z)bBo{u28Z{M754+9r1#AVQ8QlxU==e;ERFNUl!E;mZ43s+y|Abzq-vP&Nej&e7(TyYhKT3nAB!knQ?CV-d|IoHlsKZz>ejd0v*qZnEWM zyaz3~cR?P4LdUyH32NW^V$j0Y zw*v=LU-&9`TANnTX*HBtDK}YWPb-l_3zJSzMz(QlN)S9PuU*Y6l9F7Nn`~`K1(|1? 
zU~ID#Z&926wjl4UKqF_m+F%8(}EUCt_*4c-$w)_(JU|vpkgE*lb=qV=OX5Jof+kC-2Mq zli!8*{23oUEit(~iO;+K--pJ^|D4uC4U&eovo00jvRPvW`{?ni5PR|qbKld;9`93{2zA>&oJ$v5K zv%HqS)t{I8?{WOHZlZA)t~UADup4~!;lKSe!f5O7^HLi}pyoC9*Nu(FUgNCcG}gn% z#f+`SN#i!{j$g3+<4N>3NeiPEa0$uh=W?&De?E0RSD&{x?ipAVUy5~^x4l(`QOAEDYB=rPnEQV-g%NMm(u@D9-cICf!)l>lyh)F;(xf8 z7BpoPvj-yzT>5wE`JL4N;mVUf8ROLL|fvTu?iXDH`|WbFU>@Bbi2^K_s5&v+`kjNS5gSD*JMf8m(A-8EKA zt;YC4_Cxcpm^m&YTmSw?{~ni@pYbJknPWHU6fA_^%(hF27nno8$oDNvi!yxSxtu7E zW?WTN6V>^8&MVlIYhp2~jTK4S;p$c$FXt+G-10ZYfntV-ouOJjX%wmL_h ztA44@Q|GG-)UVWq`PbH-uB)lPs{7R6)Pw3_HB3FGo={J!r_~7coO)ips7CRnju=*s zW7RnIrkbGMW-Tq5b+o&zprx{Umd>hKHfv=MSSQP2b(!~yc%{Re6|q9Bi21QvRzfSO zmD0*+<+KW#zg9_mNvon&(_Uuntd=(0vr@(i{CsmYdXZh4!|X?Ol-eejQDr5!(y2a!Yt5NCd+j9YjYgE}hx@ zMOU@}(VefKRKjlA3$E$SRteibvQ*b1?Rd!NzA1 z0RM~S5w4Y_%&7BgJ)eL1-^YOoT{N3;!jMY$N~8-m3Rox z77>r&TR-s_-Yq6_;8}lRz_(Qt0pC_rba?k=#iG^G8nN=(R%@sDXN^9e^@k#~lL+wK)04|@eRN`HWwMuo?ch@V8v`yM3r7^b1AC)HB7Hx~tRD-vb zW^yf8X#sEVS6ac*hm>~MC{HLIwUgQ@r5o?2oKt%6c1pC;3+}$6^oGN)Dt+McIHj+4 zOG{D)vO;T9hRan~Wdxk>Rz|Xd`cN6AL-kPQI`5S1 zRATkr`fth&{h)qOxvd}3k0^KaWBN%YNk6ThR_^K%`dQ^3Z=PIGoLE|;lvLh9xu&G) z@p`=C(jB@(aqIW=drG>_rex^ZdN!8if}#;m?+V@5cl%2}}XA z!7bGDG zSPRyH@4$Mn!AMazg73j5@B`Qieg@mXFJL^2;{TkTMPCI7wTvyX5; z;ctWo2oG`35pWb72jSopfDYl404*nqGh&u9$zoqK#0QuLOfHE0WhKqnx7 zpR_CJ4&Zqmp4Z<7eLz1j01N^{z%b*UJ_3vcqrtmi92gHKQm4saD)@jtw~8{rkNO(! zTMssoW-}o?$NTdo^y8!p2WO2iJqp~z_D~<0aYZ!2=Ghb-v^jHuD9&SyD@tkJKx<4G z1X4s1#<(JkXGPRiMm#d*3NqyiGUbZq&sUx+5%&_PCyHpK(Ov@$!0VtPXau@|H$Yb~ zpR}vNT5yy+qRHb5xC*Wlch5-DGK_eA0+UL}!ed5~egd523z%m`5j_gr zW3E@8+JsV@RBCdOnuJo5P->D&O+u+jD5bwh>G>oeCT7$MA2=oE{zyL4=tO1+BR_fgfDmkH& z6Dm2Ok`pR9p^y^_IiZjf3OS*W6AC$@kP`|yp^y^_IiZjf3OS*W6KXi2h7)Qyp@tJ` zIH86UYB-^W6KXi2h7)Qyp@tJ`IH86UYB-^W6KXi2h7)Qyp@tJ`IH86UYB-^Q6ACz? 
zfD;Nhp@0(#IH7penziS8uoKV0C1)Na8 z$vFNdH7LcCIb&>bGPXDwTbzt7PW4al5Ip9|)CxK5MBX})kxt~J6M5uB9yyUmPUI24 zMhBoE^2mt{aw12Zk}r)}jNr2vC1){8&SI3D#V9$;5vR?>o?FV>O);1IZPWHWMSDzl6X8H|vbjF6d(PMM4_nT#Bnj2fAY8kyP+BSVV=w?GQ#nMNj~KqjL=CZj+mQZo~& znTgcQL?UJ)5i^Pj? z=m2Je#b60o3dmFa8XN?NK^Qm&E&^(xVlPr-z#rfNctj0qfq7sBfHE5QXsN(W=?j9w zz!!7`oxvNR8|VRgf?l9EfVz5rFc1s|-+_J9trfMmQF|M;w^4f=wYO1w8@0Dldz;v2 z9D@RWP{0NSY*4@k1#D2j1_f+Tzy<|uP{0NSY*4@k1#D2j1_f+Tzy<|uP{0NSY*4@k z1#D2j1_f+Tzy<|uP{0NSY*4@k1#D2j1_f+Tzy<|uP{0NSY*4@k1#D2j1_f+Tzy<|u zP{0NSY*4@k1#D2j1_f+Tzy<|uP{0NSY*4@k1#D2j1_f+rq*WLJn_-`BZbYMFCZS^{ zi4(?c5o<)DMYNkTtKLN`f5FG)ftNkShbm)G(47Mv7DPr@}@g{kj7Ex&r;W0{yxIHH@T&k<>7f8b(sXNNN~K4I`;tB(;mA zc9GO7k{U%)i%3czNvR_#btEN@q{NYwIFb@aQsPKT97%~IDRCqvj-Ss)wyPI~&Lpl^zYAcuZ0fM&!g7Q#|S3{oqRQ6`R2CQd=7D6fNNpap0{ z+?#}Nfwuwukx?j)Q7Dd4D2`Dmj!`I%Q7Dd4D2`DmPThhW*lNTe+hdUJG0659WP6Mn zYQ!^dh-cmq&%7ZXshEgVOk~uGW7LXc)QV%&ieuD@W7LXc)QV%&ic=%O1#k&OgDc=F z$ly8DQ@sy<2Y>R6hv2ah!~7tg`9VDMgLvi#@yrk6nIFU>ffE_+;P1&JKnh_@4_aPH(d9b|HB7;%i8 zak>hS!+HTw$cSMSjnn-IOBjhr`9!@GD95$(#5V+uK~vBiv;?g|TMz^~abFkE6?6yi z7UN$G<6jJ;ZXBa-9HVX=V_^&ho4CH2y1@q58>(uKpLpD_N#GXBNF@oqTY4ad9TcsCsGhQnQOcpMxa2Zy`i za5o(8g2UZ#cpO~qr04j;(f8nJHyrJTqffxmE;!l_$GYK8H(VJ9SGwU!H(cn3>)ddi z8?JLR#>O(n#xlmnGRDR-#>T>lZaC2mC%WN8H=O8(<6Ll@3yyQaaV|J64vve1G$BxG4^9a=}S1ILQt7xZ$2SxW^6G#KARj+6^O?F+Y|uKNjwB!#Qzq4sX(fOpXma z#}%N0BA^7VP!f~^xGyVe1HiW& zA0j-4+>#?Mx*DzNPiv;rg5k8(dRl5dEw!GOT2D)@r&ZEvm2_GqomNSwRl;eNaHyLO zbvy6I3i9qNWd*>ory4rSA!Y&sMThnnF~GaPEJhnnl5=6Wa@4h6%ZSU405 zhhpK>KAqa9Q~PvkpHA)5seL-NPp9_b)IOZrhg0)(Y8+0D!1?_X)*d;d_9HBTM?y(Z3j8-S>`vU%1#|`70er?Nc#l!=9{l`t_C1JmIRl?a zzLUXJAZO$2iO-#p>&J-;2WR;r#RV)R7x_v}6vsCS6Tv->(}~Xl+1PFVBz(Z^O7;Ty zOh8Y5hc8v6nR?(BT6_~cMc$AW`y+3GIJ=lKB$=$6S`8!sXv93E z33_NVN+RbY-C4En0eS*vFqGECSYe~2Qm?%vqDUVDc(S+&B@)poY~1hUS{iBHgy|p? 
zWPxmOpEdbE2p@olAji0b9(xHL_LAabBx5tX%y?r{{Ea*4u$P$sNNTvyWiMe%i$Zpt zP?~Yv0<`5?AlHJ3Z-?&N0lZ24Ti|WbA3$B&!leu)-(g6S;Yjkaq#tLTVN8o)Op9Pl zi(pKPU`&fpL2W~k^+GDgzIB}=I8R8?*D$a3y2{8NAIY-MR%r;xt#T+dQesZCcUP33mq!))HOE9y1fv_aui-e^JOQY|VLM518wHWa^HW%z4{N((VC!!4a+<z|>8Z_$#ew4#evWaWdeXEh-%h_JJ9*KCDxd>w8#dfXnUP{cS4MGr&K z!&+s|e*{N4K0_DK!EkIDX=PU&s%m;)Yjz*Dk+ zr^8cr_$d{BvcXRm;T=1*u~8cvwXwk;c6h@MZ`k1tJ9F<0c*71)q)|f~JR#*mI&E*K zjqS9rowk*7BAvFCvLc-}O`|>2Xv;L((Lo#9X+Jw{XQ$RS+RZ_m*{QvacCyn(c069H zputt6b~QkK&;T?t5}-%|)VL)+=X@Dh33ifp7vXNgD6V5&W$oJ!N+s~7L0 zZB}PZoyHv;HU&s5=xv> z0blxnFMYrl3fkaxUwGXYUibZrWRjzn4c_&IqBi)}7pmIuj4uLJ8MV;yxYr}4h`UbQ zZO-ozW&rdjc-9x5_0=_Wsp9BeFMt<8X}-e8$~S!L3*Y*}x4!VLFMR6@-}=J0zVNLt zeCrF}`og!q@U1VcBS$7VD#;N^O15Nu8u?(yg%5pmC7c|EY`hWH2-{gRsNNhKV-&}i zIgTN`ZrVVe3uMmli<5#Wdhnec1x&r z2j05_@7;m-lAxBfqe>fU61GcTDE=7q_ zplK#FO`$Y$^yZxf*6ml(&sf_g*A#M1fhL*IBomrsLX%ABA^TwpbjXAbvae-Q=S=FH zNu5)ub0$wn;eD&_jL$tlPtYHm(NRJtkh9Y?&fOfR1IBxi1+u|?uKhvy06YXa=oY+} zkJjiy*T_Q8NJm?AF><@n6kUwmF0@1!TB3{5+l^M}LMwEk6}r$0U5wstMr;?Fo(m1n zg@)%s!*ikGxzOBPXlgDrH5a3^8!gR+mgYhmbD@p77@ggW%x<(UmtFvr1D}AE0DT6n z$t5a_rz=a@Xi3>=dmp;GGDqKj6IxX4xssvQwC4r!bRtGm}nX7M;Q@+RZFlzDMFVErZeu zm}ddZZ~gPU8{%e`obq2;{{H>@8c(f)p7%A}%!b{}hEw!Biy!adFxyRGwwuCCH--0G zt;|#l0_=*kaws;jP;6qMyjNKm8xJ4LV0Ggy!lyQ#P_c^h1HfbB31!`8E3F@ftt=E9 z?-ly&7G%?Q@jGuhXh1i%E9OcB_MUR^Sq0+!vFB7GYy-M;eK6q=FpM-Kz`GnzAbuA1 zo-e>0Fc*9Y{=c>Zp*g7E0;r{;J*c!KZ%w(?^UX>0`FgH7NE@VwQJJR8wo?%5CM z5$Yk*@@57$z)HqlMNg!zkkjbcS(8XEU#C@{(FaHhE=}S2lTNlb6&KvdJr(yt1*_ zc;1`$7u|U44G&R&0o%c!;30TyglHCHvE~JQ zKtWI#Y$AROI0+(+#q{J5dT|K7ECj1=2v*$?thymsbwl*Gz}sLDSP!-de|Jc9-1J$t{`OlF2QZ+>*&HncVJ@TQXnJ z@RTb}K9>(=#PEIo8}N=IDTx09Z-I!`@YE1A%Pwe9FY?~XW!6O8tfr;1zEo61FlUx) zqLJiwp7LF1j(CsR$$jH0Eqj%gjb`2#!`y(izE3WA<}o%C1?#34_LXv2uWB3dN?rOmwjZSxVAYRj#43Y~ zL&|8QbDikD@+Bgkcn?PF~ch2)&^wA zw<45s4E5_k{R&aimP(hLbm$nS49aoQV_v1l45GdTsIP^R*M+86@Fpq1j7PrnSj4zs zzW3OXRjyvF1@tDA?>mlz!k_Sl$PCs?XY)P8FVTGV@qXt)u6y3^IEnX2VOsgFI1cqs 
zVmUiy{6-D$(%NTf?O0m-l=2m=d&4+O3!bF~BjJzAP`5V{s2{et)zEt%e6b&jdU)kD z{85}12!{p{)a^T3pgVQ@n7T#Ke(}^TgVvZzYedi*U8&OoC5HEDlHpG;?s7w^bf}cc z-RV#u6AIiVRWx6}>B#f?G4qXqKYb{xTo>HOJ@>fBZO){c&}XEroHarZUnu;Ni?lt! zZT+CbK1vpWC*eIjzwaCSxo;<}62-lzdERxHl5f^WZrfBep304RR`|xNFSEgkfDr1!{K7)=q9&@nWgf^=cfp5^BrxG zEl%)W|0$?=-S~<6M8M&9o>}z_rDUE}Pbh$OrZ+xi{owxkLZ`)h8G9(tDKRc*A1z*; zT)WWX3u*BU)b~I2&n0B}a zC2qmJ3A9%N(u&!i&}r#Y9A$7<26yqbA7c2Dk6h)YO;kg*!5HV+DvSPn9qJ|12dtXu16CdH zuifNyh^>~8Uwf);e&?wU7QHKYh4C|>@Ix$HeG!L6?=}3sZ1@^F*;??8#xzzGerIci z@7E(}lp_q$Mt+HrUm%y>U%{sLS3A@DtGy!qzB-wHUvHRxU)@c=uO6n~*PEu_7hYfZ zec|G#zKzpr_GA#I_uNDMRmzDAfHU+cCYv5#Q%#Sr5AgWd3}eM*9q~1>CN@4^f44+rZ?Ad)0^vr z>CF{xdUKt`=P+5EGJU#En?7AC<)2^y!K;eY#>zpROCGPgk7j(-m*} zblo(4x^Cf*a0wdKEN%y$YGWUd2pbuS%w`S7p=J>m}3ItBUFCRaJg% z7=JM7c%)4waw^zRBX{kwuq|E`Xve^)otzpFd`T@RHvP0y}D zrf1g>)3a-Y>De{X^z0gCdUlOBJ-b$#o?WX<&#r&zL-nD`Px^3uxU$*w^4em0d4-x@ zUOP-LuU)2>*KX6x>sQmuYp?0$wa@hOI%Ilz9X7qZ!b~r(qo$YF3De6f-1PD~ZF+f~ zF}=JZOfN51@bL1A(vRZh6^)nISvyGK) zh0Il^nEkV~**{C0{WHMqp8@pGD)iN=Y~}F4tj4%iolT*yy^Q>=%~k+Cp$>P}Wh+Jh z;#*<#uh;PGlwO`iun0CLO%t~Ac!f2^7q}Ul^bSMQr}wo$N(Zr7@dFJ;+O%VP(d_@; zX8+ge|2>cqZ?b9lYW5_J^a_2!lm$M>g8q#01K50!3Ii#<^bPeg<${7-*um^zC!31b z@Gi=>n@u$(g^wvIbW>7znUccalo7>{5I3oJ5}QAA!p2@oiZZ68zz>Yg%ajxqkrX+i zBGSSTUZ%XLXv&Ld`U}~azsIn^yjVttQ{0n2I5Ov!t^CAt;|+tivZK3 zw6ro0DHC8ynE+GDR5qndVWdogcu7fA5=8}MO$wuoQ*ok;^0p1f($lnpDR(NEa;LB< zcYIB`Q{0q0^80Te^ZhsJX<7ui^Mz3HiJmQr;}<RBz3?s-o10swpN)$y9 zkSHoB5*=2DRX~HXh=_oK#)rN<#034gA`*j3UU-`5%rN8oJf8}P3b=qED(<3wqM}h; z;ub<=f8SGm@7$RLOybLb?{$Cr^y%8Vx~lqAb^ZF(L0kF}(pl!05pp?rhW9J{3iwCC zHN0QxSHeGr_G(lAIA5j3L5H=eU&YsHanNLK>eujb@n_tiRem9`mX_ES8+;5Rvu%fa6o17`uYrd5` zE@Hjr2I!yvl8}pKF}j|2$Q?ZCopL9!?gE=?!ivqmf?py_A~h+`(&}jR@!P$^iajbuKX^TN-yj>%DtwOFjEroQjZrIl zl$UXT1?|;CtY3kmHw@j34r`IT#XnXB@;0+&E$L(4g}yKE^F&*~*t~oo%+<2q^oQgP z-D1-GR6dOkWmV_Th=qnRIsBU0l4A6Xzacd=O>qAcm|PRtF5Ah;w_tN#cE}FW*$GDH zWtZ%N-z$4bb04!T8G0|)*ksLSiJGu(v!bQfKpA^!krrW#1x;mW6K#SOz^0&k9km&A za|PNQl&_<w*l?*+7@duv6h1Rc|A-IgKr1==e0dnZer~K3g}to 
zza#wNpn+bG&?DeGfeL#4f&KvgNYFvAN9j@Uok0n`9<4{i9|KzGu@BG%{#Z~$uU)k( zd^ha|f1Dl%-(9=I_s|~jJ+)_)(&L#o%4jd`1^+|lk1~3Ko&bL$^GF%(t-aw-(v#p> zyb%5r=9MzqNBh9{)xPkj>Z$OjG0&9Ie%cTIbmp5f^y_7m)c_qpDGk(t@Pl*^{9qjn zKSYPX55*3W)3fv}_;M|WAExwBI$VdtkI)hDBXuPFC>;fl{x_6SLR4s@1n5k(oQw1l zW+4moQna0mbQXWV73gfVo{RJ{{(>v8F$DPQm3n2=jy1Qhidu#BGgYtFtC?Aur}J<} zYn*viw8n|GSQkgCcYvsRy;B)?>s@*m&-H8lH91+LOUMIyI3=!X`)#UMYB9A(TDY6+?VMx=yF|7$Q8N*&Fn|@QBpwToHQAWklQu7 zhNpWHo$W*QDdtcM^l5ar4^{ryg{v44y{dem6g>w4xsH|PdZdk&3D>}l$Y_Fo^G zmyK-f!YZ5Sreix3-E??#({V>PoiOO8^CjST!avdf=e9i8ihj9O&iOmf_GRL;A(C_Z z+VFO5hiOn2GJU{YH^_!*9L_Bnk=a%n z=wA{`wK<2C-XVdS3rj_?CWyJ&W;Eek2xD&flBl0!^D|$i_7Yq7RuUFEUp65~AW5qE z>&Dr)&8I6NPSRp>F|3Si%jYPGowy!|wxxyJDu>Nz z=SJLO<)Q!Ms8R=aAus1Sy6!HxFJ6aH?h-Wit-l+Dll=G!|5cQwm_t$zdw5epb z`7GOuvoUh+mSS$(pIgo^e@;Rr*O&$hd(!@M_poyZ?u2ceu;nBkxLTd-y1i8LOhw2Z zduppOD_fJ+wq{cz(NpmC<#$pu?Wp^@TauQigd5W?Ofo0ETmiDsu5xlgNY_d^cUWng zl(NmJgf?YY7Rxr1(1~nLWTC~dRUWNB$hAKUmuqi{{7qbvwvx0EOT(TkqNgdSJrWsh z{c$d>zxSNG-+|9b9&+t-|L6QWlgnkUT_!bQWuHHiwrTCt${^pzBqf#HFGuUHEmv1c zosl0glX>NK>Gz+hV!Lv z=nV56In(I4Sr<>3>CT@u>)5Vt3bdP>4n5AzhIV&zpgr7NXiqmEdc3=Fw#8b=d`;{c zVkMvxb3VOXA9n_pc1An1EjH6#;pV#Q+yb}QEpZiCR$J~?yJy`d=54B|j!jXz@0im| z{ruC)heP|9_bqk12Mrlm>PHXmJF?UtSw3VC{P6Mt_)<2x+D+=$QC8+=eW36j7KhQ8 z3oS~KTLs~;)?mDav^UN1RvWdpXzz+q8=*L)j!}~}w#oq8g*DYYfFEshf(&ek?vd~o)9PWF67_B=1^KliWB;STZ7 zYQrVhbF%+oia#B%E)TF9jQl91`Y5!U3S$qh>tgE!RDccuK7p&R@|p>2fl_vJeE z9l7@L@Mx#|*k9saq7}dBUU1L5jqW+O!L4`CGNahdcV%|5ll#d1-hJpka9fy ze#ac;JML}P!P`vRf5W}*UURRySFmfb)qUb;`OExlf4RTZ|HPML<>E7DF8|=Za9?7R zkCn=>c(Dz87u(&R-M4Os+v#>;_hOIR%K*sfUW_?nbJb%t6WbRVEMFA*2L42U5ObZ4 z{K5JyRxq0SX1+OgFp9D7*AmNyt$iDRsLhWtJJwB(lkU<(ddl(AOMWOP$cfThPLh-5 z6zL;<l43)E_T!t~1HbO?qC>bqh%NQ9eF}%RIS8ekMO>z-C|48})hYD!!?eM{fg zclfIQoxZE@VR>Y|*o#dVs z>)uFeT;HM2zp^LD*8EP(Z?49%OBscH+v;`ut@*kwWxuVUm=L)K$D88U;}7Gl@#pb3eDEmN zV~f(w(yi0&(#NEGr~9PK(&g#V>B;G7>DlSI>G|pF(>JFVrf*N*lYSumXnJk>Wu*E< zq^?g-j$+m~&0yoWFtl-Vr|&CbPT#m9knS|kU-VTZ>GA1_=_y=~ci6vy&Ipu0iqhl3 
zF*;*Q$EQca9pk&e4F?@L9aN>iKf|BtFYr_Rg?_5P$WQYZ<690wFx}7aGvJ4^hT<51 zt{?43_)&hOAB)x1bNo0z#FzO2exM)Z2lKt(+7I)S2z@ehy^H)zy-9D@TXccm>L=?$ zy^U~#2|0jRlcH9B7~#tZT}s+xNiCoH*`z#%xr@5%_uA^c%?RCwv~Sgg`P51KwlJzF zVRUf_VOkNgH8X*Wc=i%^HEn=1$1$NFv#%JRZXFf6b#ASD#<%sQ{xILpxAz@9IAhv_ zwVWKy!P3=ie~Rzp`}$M;X}%vN{{O2A-|>HNRbjKcRR}iRA58cRFyS)(h#km3vV)@` z{OM@8@G$V;5um}N_~Ykn5aF?)!RLSi|2)E$DOSR+k8X%=iWWo*`GA$x!OiD2NJ}==(a}n_tf@r3}HJ4CQ)u5BhK{in`@8DxJmUqAex3Z%4 zHc&hV+J}ua{I(1Em)+{Kjlo$59BH~ z3O+Sb$ALCE9Rp$mr&Dw~v8IAMT@E@lTjvmRj?UHjdLuo=V%!$!?S!~p?*<`Cffn7z zzb6lZ_4q(tKz$x1#m6|FAjE3WoQ;gr-h_uTru-0uCS{b7<96K*4^4UMykUf8_Ef`E zvs9}T{eOz5;#o6dGd_m(uLOyIo)PnCt9^GR_uQKTf54JC-(TV2doV>gun!1yWuREq zqn&g?BG`B65&WHU6#t;~$jXcXSqWh?OlL*|{8Yk%hSEz#ezKoTe>2n11mn7i9*Yqy zd*V03pNr(NuU6|>Yx`MxPkLEm*6ClZcO|wVCCR*dYkELOoFTNuVf3^a`t5W1&tVch zZ_|+Hri2!^$StC6KjYRBx-I>m&2QQrEpKz-D-CsxYS04ZJw-(qz+Vfk03jtV*r>(53tFwefL8j~L#%4}8h@j?=ox{(0a~HF z58UxBXqEm7%4*ZlO8;?)W&ab`_)mh%XTin(v8j>w%tapxzF!C5KL+1N%te0*t?;)% zt7uIURgvDU7K(Oj=`ApS5LdhNZO|%kOS|(5Xrq__<{SWgME4fN34DOGc zJ8KYIm{lRnn(TdUw@|zhb{~6w{?>vn<a@nViJI7q$NdF-r7VZ__w7)-pIos=u&Sjph}lOtHFKkj{A%X?;!cN zwpfGgz*kbiy+LqC7IODSfnROBw;WdLguq)%s!+zR)J1b>mHq}=4O(oeJ!LLJOC|g| zbCFk}75<^%a!GKpHd+C8Zl!b?v`P+#R)Q{D_@j&$M&HoRf%3NjIk)n!^!652=#QXF z{oBwg{dZ`!j)qq1cxVk0)WVE|F4c3PReCnG5?vJY9Sf~MH^qE^46V}1&`PAO`A&gW z=v3%Zod&Ja3!s%c16rdOLM!wl=u&0ekMD1xmF@}JY*{qOS&0sEtHRY#XKiJuTL~?5 zYoMc@m8Rviab$neM##RS=_$ML|6K-43nwxMWmYs;7ai%>0=YSeQTh>4OE!y6D(e~9gNh|HDcT2Km%d%{B*~Y!d6=Q>KngQDw7r-_) z)iws(fB~5n>Pz4e5=cTwBk+=t7f47*LI7(=|8r+n>lGzmzW4pU?+=nZd*;rad(S=n zo_m%+1VI!MI|zyxXqeqyUH^&seFAmdLJ;JsS+hO$E*GWZ;U_PUjWD@F&RH~$xbN>l_von5_Z-TK25=duXo{ystIpIyCnboG*~TXw_! 
zVrb7JNTQPcb;o~HeG(~gO8|I6P)o9P?4=fCPen@^IP zk~acw&cbs8@SKux5K)3yrw@g~p^&xIVzpSTLFPj^#C*_;^b(0wBH%o~xkznc3vBYYZy|#domRih^yqcoy&z@Z%v%127qj#b{tg4sFHE$l*@F zk6UpTPUJYv=4DMaBWAO=sHmWzsK{qFjaD};Gn<_`6ZtKbm1kEhxIJ1nTd7ha1b=3i zMGy2YeYmQ!9X(mS#_n*JSj^T#kl9?~D=Ar9z2t0VtR~=cZEb8k-uXu7 z@y5n2E?1x?R&g5E>~6sQYQVjiC`n@;MiUG&qlE!m^->nccn)G_ju`dEyHV%Ti;cA_ zO-3`an(a5uk00))&pmsvZoQ{^fl@3+sJK`J_(~Kh0){zK7)FJKJ`nzl{`T{JGqCtCRt9NJcfR9 z{Hu;*B!_?}UVtZJ@I=m)CjtWM=gE7?mnW}#hTM4!eFlxmow)7D)W@7YSgjg@7^U=L zJ;lrh&yoH4k5^18VOFePC@Y_%!t18hYI-Xw4)@N#x8C0R0x3C4Pw4IW`Pm4$ES8Z-WIX(EXniCyYO%OV zES6B^1xA4$egp8AQ*|!+ z@S~5C-Q4rZ+sL(mfs@I8jDts^y@qW+TMQT$vp9Yfb<+=1tLgjDLhkvKm-nAI!C7?@ zw;uuumq2?Z;8(!lD@EZ(hJ0Dt4GXCGq}60zQXO9mR7X9V(_9&QXl(T1s;VZEdww#n za+Sm3b671)Vx#v(%PQ+CV`oOGf8zcz@81vo=R^M}fIwvMm?@BX=;;rVAE4w1boZ^# zJxAW3?4ttYw&eewC;tm%$0zvN*WlR(c$N$6iRT_f+b5`Nk8wpn9r*qoaK9Dq7ee0{ zEfk0(3jPJ5u=)2-6#eTeR8aK9AEpQ_jmSE(CCR1u$rH(E0B0aV5wRDZuY~7;wg9&j zf<8ouQ~3;)eB;??C^E=BfB7X&Ludv8tGz}*bLnafJ zAPDAH=rx&!;w+>vpWO9e`-$e}YrWoZ*z3Etx%r;<2iuQN1H3x`@cF6IwP7GXtJ`h0 zj77q01E&IOBH>Z1#qF|KN5hddFjLHRn*lR?LJKox<#mvi*Lpy(fR)lIsf;!u?a;aE z>bAD(>T^T1Z31cID{NMm%Vt|1CmT^&IDB&H(tE?i{7!& zRHH?@?UzB~Dilv)^m@(a#dWnnMx&)Mw+iF`9E!zk)z6khT3-~Q2 zj4)#q2qR`n87L0U)O_SVqoHP-I#p`7FAdeMb2=kA&rxmF%|BQ&@6o!td7afY_jdYLdAyaNmaU_~>ZKNo zJNtREtG4!h|J*Y*HM3_|C+=wruJd|hXm!Orm0D>wo0irtx-(i`@AG+Y9~!(P5N?nc z*)*DYRb@RIjm8d3R5fyMG+Gr51nyY0Xivc3AQw2ax;~g)E8uk(;FX8z2xK47DreW_ z@1vKV;0li)|LQMXA?Eu%fP-pirG{2UR!O9=KpCkFu@~gTAE!u*$-KOwVTH+TMV#{_ zs;{hk^r|I~R#i2E;Ikw)xjo^C+r244{bJHG_IPzoYgF>P;;^pP022K^Uh1K93hqf@hbzo9SMFEkK;#pxh_|I3*KbDt! z;kgUvmS*K$IC(xVYw3ATUS;g^KVSrsigM~>_*+JSl}OBke)d8?Siiz6vB1SLms~@? 
z{vLXWZvQ2!LDfH}XQ79FL5-8;Nq5pf)+aBKzaig-r(^Im=6V=0ei{Y=k1n7uzJp$+ z&Qj>rx5=N9znol^{D9O_`=R|>Xdj05AU{Wu70`{OAfxNZcge|7^83jWl6v_V=lWZZ zU%qA^$ZL`~3V083+KC{Z?_dA~Xi&fim6QYxWYC&e%mP_5OQ|7=#LSpqtRfEtSP4Ll zzbGuOj+Pzo8@(qSDMy#|KrG?%e!brG`kJQ_iH4xhyJv22k;7i6q6+9&LLdZ`T8(4x zz*&C8VqL^{Z4RM-Ex6I`az$M3wdIB?sZ^y=s@i?sPN(bo`L*MY62C+w@~aG0n^baH zt<6y_Gw6S*D~EP?Uguz5HCY7kGFTpoH20)th1Fd+gQ^&*`}mow0e!V(~lA=>W$~Rtc*@YtSTpgA^LTGi4UYPoBjGxWUO&&*wJ%s2>uGLzWtn67 z%grr4<$mAJMpv6kt;4v+ysH9SOR+sW`^wv;p!NmR=C}09c)_M&d3#S~By!Kt$e~d7 znTg>3!Qm5;Xl%k@GOlW0u*GEBvaoHX(PSVk;`KjhZk|(Hy*7(TzEzp%YHE7pHu0>z z;ZVH0n$B&QQxyu`hR1OOjH3>4WFWx%ssjt(I{hMRP|uP;ih$WMiWuC9#jGwRZ#n*M zXiea*;g+kNB|aX-jh2<28((;#u6|ByZS4!a4T}v1OY%})*k&6k-rm(y-4XG*H_hVB zUh`68LuYG4<6Ao&Yra=s-%h<=>Ts;{=d`S|S?yT19RZ$PmQ_LkvIfCW#ACs>EEeEl zMxalR3bX4=m(WSN-)PVcR#mRBOB8;&rQJ4gG#rW1KSK`c;k~W3V>+!?TzDrhkITye zui@B|*1a(MTj;qQHN(lXW~>5`F>4g0JnsL=|Oa4=&>vH^^*7YE8x}ON+%31I;&Vm}z6*3xigYjfHd3wb?rKLX5(c&3!7A^gTZgN5T<4zrdzY*bDeYoU)Ti-iZ^`T}b7ssBp<0h2Ulu6gi;pcjy+X2l`eM zKqv{&jhX6y=0c_42gHI(&{C_x&{taMQz%OX1#-7kRi+&|77j(<2}PLyLh#nffwrYO zof46VOC90xNRo_(LnoGS5=iZe#ayncDomZ{VSZNuCco|tpwG~)AJaVaC5%u8tEKz~Lr`NcO_(kU_A)oKnYd!oT{&|x@+ZzFOBC!ADJanx`EQ`Z)dSzYiJIhO1hvJ-~`GGzJ;&IAFU1@QoupSRS; ztDai7_Q`lvZEaN?Usc6xCt!GM+uGJy@i49HTHDsc1!E^Z=PHF&D`c zFwh^Y7jJ%}si{jY{U8O%a+_1iIhzk%`nuGCauOND)#F%jU{cXOeW*BJS!Fz z+T=-^`-^2DyJWJI?AjLzSBpiWfHwIjeOM$Y1{CiD%0ggiaeOdVG4KL}rmWT(3;`;E zTDM`M8fBlqBn+GFD{F&;A-QT5r+x8!<^IdBkx%+NRXTlM{>6N7vJqqHU<(dF|AgCvR5)u`eC6XY_>^9&HJ2%ew zV-RY<;t`K6Wq4W{?qiuCh_xy?a*6&OQU%@aZH*=Ktk!C&LJ-tw;~IODN~J?b=@D+6 z{%UXXiIH2KE_-%1Ph7Z@lgH)qisiDdDiV|^yz>cor2L17>Ru4LlV2Fi#R|2`XjtDv?oMvo{757coZkU`55c;RLf^ANNJ?_=3LqmvYLI?Pt!Ak0smhzMFgu&uAqhdCR`xf2^QC{24kx5B(gpNy*RXA#~t9 z@-*6?{NLoSF|J4>DTSwN=@!mz7zd6;F)`CHV%;XhOXD9D72-Cnb~wsKr1>rSe*<3n z-M7%&&ACRIyvkDOlWT~Z?g)iqjj`B$eYGpC7Khzv><>9-yFDu#l7E5y1&IHALVXFN0Pj5^ zPHE)~)CG+hteBE6j7rbc3m{!WDLWTL4f`~@eplT(yUk@anZ~NBaJXrrsj~9)z|iT+ z%6h3v-QQ5&4e_9OS=qfqqYspqCoTa72g_sqI=unB{$=%)?~qQfAFiz%H5l|Jy?#-2 
z;81C42rQf9WBxI>yICr4DRJ~VM(={9uL^|^%#QZy^+v5$H<$>+l?7|I7QpdhSchCD zP6@$DCM*#EbVFG6=&Q-~nCnZ?q>y)bX?Hu}Vq^R9YlR%35_ zZOs#7^Uv1S&X$UbgW7>7tE*aQNyOvX9B_k-bk-bXK1XdJUujUs;aWdsM9ZqKs>-3I&3$Y$6|3z(+qXTAVz67 zfNj|u49(Z+!M)J+ghIV~otabGe=raL4=`}BKQv@_)W~FwF87Mq$X#Fthl8be4ab(d z-3>CnP3qmAytiuDU@(~shT*E%nAv2r!E4CHkv`m|Ubi3`T?q7T)*JdT6&#$J0-cHD zwh54=MqpiFLE#KrG7~dlHe4gh4B@1_Vg`$7TqJtpa>Itk>gvb?t5=_nFm!Nid8FTJ zt(0-SbONhw@zajl*`2j@Pc80&;A6M6 zxHzaG`_8t+YS6FBz22?y#%*p_X{0oGWU#^m0R=7CN7s>elAm)QWvBp4$*_R@Tu2{( zm%Nh_kMD>kAA&X&Jw(vZyR&O_&2106|Kv>K#0=HG{QGugN|7|&p zqK4c{-NU`~|6EFvg>zhW#Sl>93!^4uRKyn^4>;`0qVd%ZyQf$z44afrzD!UP@2I#(wU(_ zW;wzVi8Yh1rbC1I4;1D*%$8+|y0tdD3!N15MdfC#8}N^AZQJT7af1`JsKVQ>kr)f8 zydM|?{QP{OP+aG{CEF@jl#?&Idc3Htb_oa`gH}5?kS{d@q6Fa1=}TcYJ<-zHYS`|S zi3*KEg>RH!l&33}Sc-&_6+%;CkpRz(Pd-LH3o~Q9u0$q7%;1#o_PP+?>vC@In01ZQ z>G3#S+h@U*%X^pjg9U0T+*JJ2S4b{q!ST)OG6s-F&Gi`FHm9pQc_1AKTAwY3+9^pRC=ybu9byUJt&v;##Zs%$I9L{1Xf)bxEzIY8)ml$cL7_+}@Ms@lW{C-Qa)FfCFM%a46E`o zLW$x`>F^yJ2f!mU_YR7|N-SnI5=sS0L9C~&PijZ`LZiW5;O1MTvT_S-Oo+opYKA={Z--tV5D7};j;rW^ zDJ&F;KqSry#!!KVU+f^)(of}N=c=UwpW0kj4F8D=3e>qb=E-vN3Xxt}>{Y1Zrn!Hb zbCb^-lt}V*BC#>QSX5%Qt*Xn02Ox$ilvF$J@VQ;rcGj)2+MF`bcWR|ms8S!2iHpP9 zk~WRnn5eEQu?h;9PU$*LnL^_d7xA@nMZyRgd=_B-L6A*q;D77^OlM?F2hubAn-um0 zX+aF`D(69ZDe9q5p{+m4`!j57zCfq`!>f?K+Jbgd{jCa^GRH;0XHF zk?j21>gtzw(6JpaS69~*@OGL@O7<>AuP(i{q{K{;mlq)t92de3BRdZ?Q9}$9<6nzb zfv|_={1(?7M;yMJOhlQ{2WiPLpSC=!-C95GQ&A(z9= z$>OSIg-%6Dg-9eIZ!iZ0{380(EDldAE;5kYUt4;nqM}+MFS1C*rb3m9Ur=BWUQ;N` z&CQ3%Q>9(oV2xi~a5Nf?%V9fICNUQ()zz`s8T!Ys)vUAIJ#KrkPpL{6UvqXC)Kv<; z$ZfZ;BNJwgw%yUqF;ppC?UJG*So0>ptb_YE*gt_C%0$?TISGe;7GRKSd#C9SReL+4?s@i4n^10LuQYmR&(g;e)lH4niHpk{c7&^j z407c-y{mbh)#h;6tgGv`oiHFFUmUaQXK}MOkL(IGN$%m>l{XC^sXDbXQrlEh^X$5@ z$EvGajC$*)*4X~8${(Vp%b%5dJ)0|=u7hp(xuF7CLA-Lcr);iLCWpD50=cvZ8G| zXyoX^g-4^2sw0pgF&qjGnJq4C|1vSlED0O4Ok`SwY>rC?nOeUyX341n zr4tjNMM2arIh9Gqi5vL)qsuku5`G12DZq4-2Vd+_m2J=&fx5H4>G>@{F!OY1Bk4!{~ z1!xd$9M)CIWt!q5zNv^`>^2xW3k4kNp>KS*29BkR<^S3CQnH(xahXxsRNO=gMiDdk^fIJZBDIoi=At!dWYuAML19g) 
zJ>Xt0Nd!i$HvdF*IJ~$0;Jm$le>hxPdT3GG3WHInR%!a<<$XG>{%KSZb#7`p*ty&7 z36zC`hZZ(3HyE^NV|boMt5fOqtLCh^HyDa!zwo`B3cv5>dGoIi1X{IUcgP0hLs6|QLRuYQ$V=?~mEXa0?)!B({_s@J!{Ob<-uLB@dvXOVvQ&h(Xuq-HXW zfN*Mmdiyu_r-NSau8xU;{a!C5op|>TOf>EE`O3a)hq~#yK%g%FHy;Xz8ye5`JNnNz zGz<$r{$qZ%*E1eZ?NlRT4RGAmpj%8n1vpd8=$VDxu zzn5*+>E;K#^9*|HCG5Bk4BTH{UXyd;f$Z$OJhQR|GJ2Dm0ue7Sk6e2Mach>DO;)8! zJ-=$+ZGiy9ccS->(U*^WsOvPDBfP_hMNXM{Hn_0}%SXn70}DE}PmIks2)7y`O$C9V z#Q&376KFTR6@Fhr5B!9F-c7pc=YN78pkF~()4R~ssD9Jw&E$`g7rN(??Md(E)0?2@ z6A;C1h8<`R%sXhbGSd!&*oD1@>Ah!|@ReXU=$_QZfq>fn6Qt2;hZ431h14_OGjLZZ zTz26C(&H^GNx!<#+F|d$!Q=MB*7R^$uSTmy{DK1lNvR(qJHvY?^@$;kNX+NoQOpnc zy!#eX?@vvEpTK00W?67n5Wyp_qJKqihjA_i_y$-X9McWztuTFdtkX=98S9baCHmH7 zvQh^6Ar4wd-;* zTQRCvbL83oQ{geD}z%~Uw z4zuf@xhGCOc1Z|P%JOpfgWjdyo3zq5yDKZ*-gq45j6S1pA%70~BlT=RAS%VA57G!$ ziOii4mq#GqBN{0uhbQgSyJ)L7k?^|l4mo6APW_Xk1giAmY^($u)CLWMl}rXf^1d1E z#!3q8u<1+zcpsXY;ACg(HJ(AM)elLS_lzz$7A>nq6Aq(cz6bIXRBDS_(_rvd@cFrt z^nC8J#+1rZwPPq6+1}WDi_aJExjomfvo{;vH74^ydi(UWN#eih zO&o&LmKwQ{6==+$QmT^O(qV6;w$fPPz)UZ5P)(VBvuPzymM0cb{?s4Oh%j8n6M%5Jz z^xEw$Mpuo=)Q7GEgu4Op5Lp|d&qD(FFPZ%yP=YXWDdbP?{DIB8u47Tj`NzAD*H<;b zb_zm&o%%CXjCgo|Ycvzne{E`PXl-e1YTQ)T*->6Gd(PApX@Iu%oOK)vF#w#vBL)aR zwR~y{{eeDD#Q<&vh@Dyvclrr9@d0;orvA*eARfLm1#%gp>jT75EVGCK687fc{a=vF z$lp*+%>G*D`@g`{gsF;r&r?<9VdxN{yGSYJqkhFiNix>}jCpLXu+kXarB*6hJf0S% zLZwnDTj5HfP*ak!l17D64fh&L%1WBpkP&SABI53;r5xCe0O}^{Aa2g_PA)rq7WPxYJDT;oLwOrtffWxcoA?2OdJiEVy$^#+_TH@1VbM z^yIA&oQLdhkal9o4!`v{x|zFw)pq&>=CUD%40gxX^NnjiF$wiN6)DeeR8t3Hg2j||=&}i!O2bSPx z-h^zH--7jvE$1|K06&tyWuJin9Czn|L+mpFKl9A9^pDARh!5~Hqcfg)XTsxI>WITL ztp^X!y`#R#w9xNg3~S%SDWXnhsbTGxPeJ|)tod@{UWO`M93FK`mIUssm>OX2tiW0I zfB_Hv7PW+UAKn7gGUGkw8859*zZ4nLmo0as-@n9@aSdZhVLmI1xQZD;3e)g;8N3&v zQQAR`XDtKzg#9|kPXyhPN-x8ponx_4<-7b+KM9mDHK7a~)Rys&I%I zLtXBj|6ID_(T2Jq)f#16AaQPG+plgY-1y_(!TS>NA?+H~V13<%<;$2coT0x@g|c?Q zT48#J*ojdo7^tXZS?Ky$hhCPwes}Fi*C=wO=n_P`+ae89n~4HIo)|?|F!$kb^A_t z+*e&UCu0mD;A<(XaZ(EzO!+m8)JS=DsUd*!hSg1B8`X%1Zu7XEuzk9$v1?o}mAV41 
zJ%h!Kb0p#dw^v?cvnCWik60$}y1b5@TT^pi$Labp1Nhe!j~-G114;5!t)>t~&=-0JhkVU)KJcT#Z*$A5_g7^-3vAS)4hEBzj_-bz}~UV5vD zzM5%wB;AflgbtF5+Ym|9w;>C4kzPZuMY}~He>M`ga~5)*WF#tBRABVtph1v)ZGyfE z9hyLgsRDGGp7%5zLodTh5vR#L)F)Tu74^yFbBrW|>}CRSyqv#7PbrIGx-SZ3A^GYl zdON!I6k6t(e2$ujlOB#h8pQD)=7M}VORcwl~|VUC4v-GNGf=WA$F)5>Wli$JBM>UTWTV+YU-|u zRB!R(56+QD?dGbQEoJyaw5hZ6s&MK~a7Ci2KU%YeTKaJ9ESbsdo>lX3ZM#}$b+pq$ zxRTmz*7jPsXEi(IvtX8Ih*zkuI4wB0W;AHFhRs3P?fWWuG#DgDCeOd}$|pj>UtgiG zza6-rc#L?JW9GEtwi^+&<4Sql$EbCaw~{whR3taQiniX4w!H9Hk>DSA-p>#@oC40B z(3;UUslFLx7fkn0B_^Lk%P;(d&;R*(`s~hR0}v2NyhQLgLCz!Wle4Gagr9ur%G;<~ zcytHq-i79z{kfp%SIJ*6I`}!Xk-ALj7){G?0CjotC2}{~h)$oRzreGYoZ87jj5SQu z0oS2>$^HkDC$?p|zI+Gnel)d{`U`t^Hk>#3h#Yt*d4hZX%XhL|z=%Ir=2G_{w&C7cjlI?m+R1c)|bG1f8vm)u0i3Y36MOu3So7S4C?tl?cEr_o>- zO(aGjZ^fuLKpM`7-eA1BkYD68ntZSc3;!`!{6h2=Zdwg_IuMvM7)GEeTro{eMnAu( z5E4J3sUMO>>Fs2ARq_X%Gi}i^*fDj%NvmjEbg>m*S{Gm6rY)CBluC)TT-&CNN~B7q zR1!@Vw_j-tmu%y)8``yHY~wO5T(XVJ?3{wi%YO$c7y|5_0POi7&t#O?PTWZxOKatn zPsk|5Z@~+X^&Xk{)^}qMu}UE2KxW4H{x5f!{ld9gt+p>_s+1Qg@^YmG%4%!H0xdZ6 zTJ3_Eu|m#Qqdxdz`;;Pr-5BEt<^Z)^s)(hHQF~poxBrmpl`l&vY$E$?Kea2?ObRN zQhSiGW2T>BH5g|u?D8C@mrl^@V50|qCk|7&sSZ9rOb?(Fz}vk*hdW`vCIECe45ORj z=FK3X8EK%IOl$rQ&)=HK*cqM+wP+>mW*7o?$?*H$lqQ zKxO5CPOVAKuC1uJuYb{f6&1DkYEeJFn%C9X@bVgbrlPB>q4A|P%U)`1nv0$>1_VL@ z?2y;ndms=c*TAQMAv8y$H#n`RE@vc(%RG0 z+5!W9v$b{JyjB=GOsA@;sVaVUXb7g*+zeAJNg}PJJu=vz+>I7J>kNx^`FZ&WYpuo9 zVdflIKAdNSSP2j&Md2~Raa_&JgMuYyVwXLQCyN!@|GN*6*_ z<<(-Lz^hccg#y@m6S|d3uRti~d@ls|Y&N4|Q83tVGTI^cra!3i35CVQLZMHk^ay~c zU^g8&c`~tfe>|op12Ca z3I|uB+C`5YLJ!jm4xhuAf|w5HPT({pbC!e(hyp<)^J}E^aN-a64SoXv4~z5^`EtkJ zj>!+}{`h9s-Y)tJ{O5lVSx5itQ-CY#P<9=481;@taaG?D0}e z1%uO|A3{Vk=F3c~3rhpo!@xmt6lM+n3=F3nJFxEICv)pB97YUmMnId5_OKdd8P3rj zj;#yoVyfA{zF6P%5V9HRHy1fuRZ6YXW?$dXwaeoP2#Q2yW|OZ|YqFv9u|7ze%oFe{ z{VG%=3n%YEiHp>Qk!h$pL>3}qvuE*9q7>4 zS}gN|m8(k}ez()Lv)k5cb}jTP%&iXpTKaX1Uf&ZCMU94byNvEE*5?a^)E{gON~KOI zmP9pDuT18LMR?yi!PKQ-A|)g*OPDq=QVtm##$u7jYDgAe zDY$4l$imCILdM|?eavOhj-RA5ALQHsnZk+Zw`_Z>wWB|aTV7NB&H-Jc(HNKOBq5Ea 
zQnUJjV9?9uG}lz!GYBcE0Y3*ppf+met8#L4DW{^RreTBA70JmGNF|LvL#0w;KWs~%s~ex1itS}f!{ zV#iQGDW#&Rjv09~sZA6loM+-RXfy=KV_5hYy&yul6tjyo< z))bp zvGcp8rD>hjYM-d9ia)t_#S`&(gGSA_N;Sb^i@iFgpY18XrJ`z)&S7y_ZL8}mHhH}j z6TMAM&kU`2JP~h@%it8IIv}WZ4rk?vrMpg{5C!$-PIPY3 zZm-NP6zYl!Wch`vB2{Clt6im|e_d8rVi-{ntajlt^YR z7TYB1datJ)&L5}^`ep6prU(21m(Sz5v8Qa2S>-7%Foh%G*0y$Wj!ROxWntb{}u z083&Tlz#0_Xc8h)jpL1=#k zZcMlDow^hCPPfncDh*#mh)^?(p_87)RNpb;?6icDNws2V_=*i>&B}r-$P_6DkVr(n z#8l-$e^{m|Ew*|Ynzan6g~S0g2yh)>_hAOR zjVbKD%V3wOxWfxVdno_HVEqh&s5x7ShqovVA0dKh4TjTgxI#|~o`uFj5J^w9UYKs( zN1R2g0e&~Ubv3UE;1E@Y-bbc>f%*t?>Q#W_v-%<&(@fPT3a1KufH5o%;MiWOOb)wA z&NB+mpw1(o&hDCr(7dkMpB|yl{r>mp$gd7=I17g&A%YW$p51T|4bSiS*P&&H{x!d6 zm`q-K_%L|K9d-53ty;0_`TF`!L{cy+rYa7D3y5klbKc6}f`wyBW8wV-m!UKkj%UHb zM~E!cm+l8sAPes!wlS3U_5KH;A54KPT%K+}FawS$lkKN>YA-{XOnZhhS@qmZ5Vg;1PR<@QZijLCC{zAczfiP6e(l zw01327dp3N@@lf1vlr><_wn!ENdgxd;t+f3b1%JwIgP1L#KSZPr>S6MeZ(4uwjPBy zX?R%bflJEJbOS@vEF9A`3-6zLo1tkIj{9NZeMHl@_A?0WG3~PLG3~PL2WG%A?XvLR zsRaz}GVK}KW#Nmae#X%5qd-@QwQ+lc{=!^q68w=&;-&&>LqGl~xUj3dnS& z%#zS5wszZRP?fn(BXtOw`b+6~VEM?7Ya!9i$VXh4>3v=~z(G-ByrLITJG4JbSK@j~ z0EY-6z?T4L{1M=<0lph}1i%Yed{2c?cpA>MXW+yN;#}V?43#7ZS@EJTWzy z7<~b@3e*2hjJ{yum}6LYKWI_RF)SQ&3=8ig_Angt^?nASJ=Pd3Jd$qTJ9P}L0!|u+ z^?8l^6`ZB7!4e5f7RJ1^vMENaS;@K$shtuw07tRfBengVY2|WyNl0#}mTC3qAyimc zV3Dh9tj;ABYtB@5bXAsDym-UzXX2G*7fHR^$=ojDT&<>A{-wZ>QeXSAC)FL2bW6%GMim?y}}%^`0l%@vjvZPg=Y^s%T25VGob{fK``X6H-fYkC%a zx#hvwwu*|?KKirs-tbl31IepTbv)A+Umy||+2xZL6;YLY(L1O5-e*%4;!J zm{~Gb$1hXVwH3jsrxaBzPGe;uaTsj?_(h=hwNSj!f@5h6HEu#JX?QFRA0a5zf#E>y zPzNdvUqT#ZsGX_O#85RvRG5CQW2l;iV=IV-j}Qfn6|{k_#Z{eBaIk_p{{_9Xtud{$ zt^0^wXf^P1D~u9XNditvfL#N8EM|-x9H@EZWAqX(nDm?9JVAvIquS%g>1Pj5{(!== zgn5pKXCY=rh2M^6!PHY?W-UpL_C99TEW8rhv+#bxg>Ff=uSmoDhzHQDG#q1(Z9fR@ zufU~gc<}BCac5#P?1nr;re$K zp2)knz7o%#g^_Nx#*md7{Y|_pvmGWLzcTu)zobXMi149GJbL;XrV3TXcaC1__Xqx+ zHezS>ZPTfZl|Q%`QK(RsQ^cRCq6!OyA!BR=4vlfW-l8=Em-uq(SgJ*OeU$!KvFUY~|9Brc$CXbF=rYB!n>@U_r?n4xfGs{dmQg|l$X5iER!urVCL!VA*y0m91A0#gBt`x}6(QvF;@ 
zh@pQ5o|T<~XQBQ)JbD(Im2TZf>?GvV@N_R(XfW>z^n-LCy$5CKwoj*_i;A*AdmRO< z2iI}vggql-^xx6g|GO$(tMp;9Mn97ni$#*!63c&Hy(^dgFQ$5zM(x-)$X4$vl=zh9 zF53)p)kNglkn}&V>!l7|#n$yoNR5V6U9V}}WnmiyuT~Zs${k8We>M#zmwhpkhVoNT z9{9O?2`#`&K(C(Wz7KGqX@>HT5E6hhHMW>Fgu}=T`~X7<3>?Iig~0GEG@?=9*5Ktr zMXoe-tc9_ypl=WmDJZtR4>I&I0&?*M$QZH^+V`MuVg>m=^hFX^L;JVEbu%;dY=cHf z62i7WqlQzlmMh*A2p6cSHDix&MwKSc+pkVcO%=NojM`hV(k~XHa zmc2o32o(#xT5T&FUVs8yoVOuY>@b~_T8rdzT)Jd4TX0KUEO2Yqa^6l>vBkDT{Nf4| zookF1izT=W6X1ZUxW!;n$6#i`)fkTqG%E*iz(V`8aEyk9Vt!(wBRpjqigC+A`>w&b z{d&KHSy!MxPq!V&fPR>U_U^-+$M!IhhAzs@0r~zkc*RoSG5l~|v%`vI(?xw7^%;z&RU z@Oj!|)o1Rki_=G@7*%vbQ%&N@m7sP4;hAi%Q%R!wy441z;@P$mpFgwU8Txa>m{Ds_ zRXaofTJ>CAT?fazG&qB$E#NiTh4TIY)Iy7Tf_o}ZMlCdSu+(8$ALg-D&|pQF`ecke zH88Z&|2dYYEEMw#3+-EhSC54rn1+(=ydGFR@VYqVz$*|BO2CmamQ$unGlg&v^6Q18 z1munMUnU-VY@*n&u`KYHFN6(iwmws2)M>3zvZR06JzWA!uh$J%Unv2mR7m4C6a~JX z`Uu|bXM@-Q!v|eiG z8_hEbMJ1P1nDN?FR+uV{F1Yu05fcn#w&LEpNJ>JXdIWHvO3@^aX zL+!JFa{mbMH7rJncc(5h@HnnV2AH-m@Fm0${io@Ecw+#E^=G8>z33Rg?}QaNm&F0e zYW*Q2S6{QJ%VPlCEk!8QE!rgleGM(IIvi zWfoen0^1S{l+hq8bmU7c&mIL&11gNcj4%|(4Gu7#2HP6zEw=T-typU@mBN^w9c<4V zaL){s;VKr|PhhTMp_r>!Xx}ExRbTI6kkL#m6x%3l+uoaHKu;S%FFndR18We+^pv@C zgQX24Tf!;heR@ajyOVl0UMW)Mu-hu7awR%dP*`A9#EuXArv=OU?ynV;n(WIHC3Om! 
zAv;+9-NnmNbL~sxg@=ZT>oaDHZ7+7Vi#XZfId7TT3{}`BKn8hn-3(^FyemF(DmmL4 z#?CaWl{4lFKC}5sWR=~_+H%$K=+@?v{t#1`O|6tQI7hcOxkjTIv$Z7D^ck<090O&) zuGJ7zp*ccT9-j|&+V1#E)0>9{BDPXn>z?x90^^%^6pJ%vEP%5h(PL}af^AS=^n#>R z1%*PHnZsPTzXraFuyoQYhwWVGg@umQ zevxjADVFV{53d(+<#Y|O66}#-cEsjUCOOX7^$kL)0VAk}=~`g#z4sl}!2a~7bUm<3 zhcAC?L9ok*FYU$n#Akf2&3YW`%-KNVFJcWNSOf;0Zel#Sc#z!x9U^@ zztE+-l&>wetZJTFr!4Q(fm=8Sf?X=PA^Xw%j`1Q7fSy!NVW!bj9`>>X0Dy%X6mZrGpCIa`& z!g0?m9HOEO)@QZ{TS0{TW}&!m+&T;O9>aZ~fHB9IzJsa0?_&C9;b5KM3L}^^%{W@Z zLSIfp2TWYZ6}^Y^0_EbeCrBuxh;p5K{|n9wD3|^mc8k$naMwS5H-N~lr*A>|+#k@N zqg(!Ug3YwPne#tX8TW7C1*P|# znf&(Y-R9|JGM+DcIF6FJPy7g(sKSvoq{}}JjlDkSy#X%pnCx)+>C3a zg)kbEGmajezTz%m62@aO&y{>}SgA#nkNeU8F^B6ey-L~Qa5SluP*GOZ923$*o)!7eFHa+1@>kCu`X~2O`tPU!|6=TUflBU9Q_L^3 zCYNT^D6ocT z?QQfd3V{@aIK?2yHs189{IAP8@|W|VF1YS%yH+~bwTdX~A%9}#PL-ujE3rfG?y0$? zjrjCy=i_YW&E};I&8O>|TDDg_O(rrC?j`@avr*rr<3T165dX=M#m}p%7T7sGRvd_ zmvTJ_;ty1A5tNiNn|F-B+y6!!p=jzNynXA`C}5gLY$cwDe^d50ygeuFuYjeUt{t2% znhaF3W9rY$cZyI9E@5aWT_<^F9YO($PEaBoCU)0$T=K(V+NOEemN@J%iS za%J!Eyd6+m!0mKy@8qli0ZDYw?NHC}1=uUMLarx`ugcE1Eljlx|x zF@#SRWzbh9Pm+)RhAqzw364;nS?UyPD)dZw=1KmaJ|%B_|8-m;8oN?tb!NThp|-^u ztsLQc&Br-eBt=Hxgy<4Z)pY%6S*mQabGm#p!s(fuJUH16(SOjk>1^Hbw4EwO-@jxs z8W-0>VHByk!90R%taHZdR)d(z;@*~%UlA=k4OoCW(BO|bU_Efg0PH<{_xuqDEr;nX zfa=pqWdQ2fI3?Qu&J_N%C zJt7l}BRXwJjK!%qq}4^lV%c0|RVgeMh06NBP)|c7h9bKmoh}TKONppBtW#U9YL(eS zpvl)c&n7R!IYBJnL3brDZ-4A@&aH4KV7%}QkIl{0Md%4 zrY3LX{LfT``!^0)0GDY@vB~+I_vpP@>zR{00g$!ulNvWK<0v;TxoQ_uMI;iX&uv+n z;OQ%*QuuBrVX2<;UIPcxSVIyt88923AmZ$a(+kup8J<+SqKOI@rZx> z)d$Fr9)0eu&pE%tIya?vNjv9Xlq+k?^w`O(61``#w$R(qPDm3&B9N@qyhax;%ZA^#c6`%BJ_b<8ge5+MD_pPSJYFWt4?ou;@8YFj^Md0Z5-ihNXxsO@76>p4QRL zs7|9qkZgi^^sA4km*@9R;tEvEE-z#wQq#3u^kQx>BQBcyGB1`Zzk3gtYNNg6@5pFX z)$_YIKapsvCdqhp)x@>CzE>VEe~Q$XST^>|+h}sx%x=s0yzY%guZg_l@L~Rg8~!>l zuzs+2^vi1>-17P0{FUPi`aeV3Q*&Qxs_p2iXnVE$)SMS*#k$(dI$i-BgkX+vz6NLK zB;dRaq=J!Gq0P_H%9FI}InL3`BY1u=m;KNVe?JG-c%Y#+f*e7q-~H5sljq3KCZC}W zK|Gdx=*SVMAAwKm-V4t-;2F@7gG^2ceAkp?a?=C}UlN5;{M8@vDy)W9tDqI+AS8lN 
zJ`0YT;hy!BoQ{znO$N~KX(cr^`3Le%Iq5&LFFBEXYTpqUM;$zO13ae&&oGRiomp~Z zx=;s8>1-u{FdLsXvwL@JU2yW{iJ=E#vD(^L?17<)!3Sa$b#)c72a-Q$Dnr7t9HWVR z$7C=L$K%6L6PNSh4}Zv;U03()s@2cd)phY+d4)HpzV4aTt1iK(>YM$TBB(iLl}&|7#*NU#l}r zKs@|t|HPu>kx12qQLCGe-lu*%>CKP0TvvCHW68TVz*m#B=F!ih+Pv08;;9W7*0Bw~ zg9P3=2=81D^jHqNGZ?SzD>4Pw^!ZEhQdvSxow}WIPKP{&UZFp)9D?Gt%{8?zjE_HG zQ`-bM8m^?jLa(&1(@Ui8633Rd_Nz-AE~sg<>ctlidv56HxG;a#o{zid^!Lx{{&>%< z`R6-2Z}1#`QMhq9@!)Xh_{B4f7)nBT#yVQ?@Z%=@O_kJ#Fnj-6mfBLS6kV zl~PcmP&xTZMS=Kck^=@~!1BJh`|gVJdJyrYT3RO(704U{jiDmozaHlJD7}NT0M^99 zd`lIZ5(}%Jp3t;_X5&@tT2T-{$Pm};l&n`&=X6O9a#`QWide%XZKK87X&ZVFXgd;w z8uhxc#B76f`s1T>_Qv1~Z46LKbXkqR$D|L5;ow4lxD1X+!SNQ&BJL|tN&i6CSF_L? za&umO9+akdU|IKVrD_)+SD;&3HV?j5C7L{R#BZ~Y1*?`>tR*5oWIj;k!9Ih*YSZiI zx&w1G8Us*Y1K4VZKs$rXNTdqFz(f)CbNX>PlF?_MM{it0uY>vZ*P$Vbe|eI2ptm^C zm?Snq{9-Ii1vw$J;aNuKN`WWfX9g7jt&&!jkZLc}U*d&T^)my~rMf3=%@hNJ0W3n;i-%BrQpqQJ5{8 zLQ8)w<#yASmX6zX>9`Q<@c$ki*^xuIZ5~&@&DNxJ_uM}?j=moYxLb zxz6cwaQ_}0vRLD?O39Q|>cp^o{Wm%~hK4#izOi2Z9a7z;45?H>g)xFs5J7o`qWnfj z(B*Kf8GBjYyC2PINL5cXc72n)Z(k(Z&=8I6!SM~0wrl(rh5Z0=X5%GomJat{rF=? 
zvqtR^n++&pT-Af_W6off^fsr@j;=b_(3r89)U9q$yT;`vq5kXK9Sn&)D^o9W%$ zAUHXMavD5{EeTMZQqiq|sOjz{2Opf9h)muBhATSkp#OK68S8W%!}q-dpO1w)y-?v9(Mk6c{Dy zkXq(ZO;fbFoHLw_JxZmfs-_%3hut;(H>OgXGvqA8%^yPa%L`6r`g~MwQX>~W?C6Is z%AzLgjA+xS(QLjLucXF?`?+UxzZfEH3WxJU*xqz|O$b6vo zt?#sVT9qNSTi@@8wyDw*#(H{U$t`ZTrL3wfd;z7f%uX{1{;#O&?6$edDqAI0C6Ry^1CW$HhOViO zm7VI8ln7h`OG2Tr2ov;X>ai`?(mfdz?^?=Bk8>y>V8*SSr(L6NSGXt&6C{-4tE7o2C#KQ*D-I(EL%-qGW*S(m06)ZQSltuJn< z_co#QPEZBRNZts}jOIfnUgbt^5 z!Qr4=64vDXF(@a^my+f-PupOz8f2y$%f)3vVZ=urg-m!D(xe?HPMb#Xz?6V(;njP)QSsnr$Av}8zP)vxP6NG8(YrUw2*>E&-l9f7q~vSR6l?OMWXAY~+(yQ&8QV_KG}E!R z(~#8blLoi8&u2y(7cJr3bKiZr$H8$tw$d`bRI^y zAma(;%&L&GSbGJLOop?K$>iBJJ0GvDjiOJJTlsRdjRON>O~KW14}_T zRiG!9!tBNBAUifnklp-HG#aX_skynoc?6{kO*4@NLdmLSjs7KoQ$%;wnwoowr=pUt z5C&QSCq!wX1PbX8`HpB=Z|`$U*1g)*In8ac&I;vvrBf5}8=9l5V$p@}o16=4yB?dr z^!c9NWw%k!E3H*!<*o6=zV@cwiCQQxusvuO_Xf-bk!-Bs7nwUy;3k%mPd%57%`h0v zS-^?hy(oMASL3lz(Dbq+OhG;_?SiH_P1I7GxMvypEv>oj{odZ0LOMp?6s{N;cBfxOdY(6!+QY;mN<0~h;ho9e9tm|dnYTPKBcrzB4 zlJ(zY;H)bRR_g|)Ux&nSd;R4Eo%0JBWp`nF-ud2~*Z&Z2x0e5)sP;6uUz`@4W>mobp zOSi<9`r~s`EaxQ$30nYYPANu-kk&8+t3N4y}9X$H9%Y#Nx?R>*e<4% z^{1CEydO=KoSnI^!u&~CoOLLE?pOyUliGN+Ou!)vX6_4xB1}2XkZAE^Ev>V~r@PkK z9ZmwZY;7AEr0B<&J(5hN(OB=y>Ib-D^0B0*10odt52hsOGvZ7jumO9^_%0seJ4q%Hvj6R_LU;ExDJdR_Z39vRo|Y@{UUQfZv#j z)n^c=8LP)zf+NlFCZyI)$bkQf^f;_=D5g93H9r3)ri8x9++&wctT4Md-13;uI5k=~ z+rki_MQ)yz=)cA+R`L1#3N%a*uj6nyWxOhhr3a9IlMol< zxDA*7NakEHq)bhtCa0Y!RO+F6YR>K9L@h|-(N!29ymTi%{VjEl;#y3OYkN`)EEfBr zP#|*O@|9v7HI{_N!C*FIpAKCt?=3%t9u5v zK+i>=+gIIg*Oq~v)ponDB3sef*mQow=4aEH_V!Hr+07fyH#K&WXZkk6K5R)QPmRtz z(KB)?S>NJuxvuS5^R-l}X}G`tM|y^ZbWRPgzdN`a+XVXB>F(YS;H36IMquafF%yS-kY z&+FYiZO$BT13(M_P0-+*gGT1l+IGYnmb)FvYPIHgz-L*Qif(PIy}pI2{`9x4spNwr zBM+vMZ6|8n?&a~s3b%);aEDt974GMeLyi4Kzd`&hia-HgBLA06@?2Q=T=?{Na{wzA zz}Mb6;R~N$pjL(>ZSaRn<%+gQn5hd(*VWxUd-jRiL?g8#x0l*7_L*Miz7vZM@0X@&*K-!JA(h2hB z$`j#mfMAtgVd(Zj`efdF31`IAg7=cCaeI)g%92Id-0vA5CVi5i9n+^D@eppM^8QYy zhdhfI4v+F=z;LuWFZSPK>TC z;GWtiNNew>pAc*G((pIeGn3u8QW?wOu1)n(QPX0A8T6j?^! 
z&3&7E{nD=Bwe5gE4J>9>2kXv#49|wzp9VGu04MSr4c9-4i5W7WE`9sg^nd*J-@exP zOxt7c|AzkfH}5n)L+#IPrhZ7hpW8?s$bUO??)-cEnS0KHKDQ3G)D?b@f~Lt%qgV~P zI&kUufq-CV8E4_h&zjxY`ON6XSJ3n}6fjMVAdRvnPTom-rluKrDmn9s2{9 z!{I3;fIfFc>P8&Cv|J?Rfz^|Tap$rB49{})T<5K=J8sb1*7hd$ber33)H>ED@5N3$ zh36o?yk$Z^LU4fTJ6_a4Z<%IA>(njipQogh@6#W2cF=FXOa8+numSHT;GC@yR;oNU z+h~;gQZD(#i&XZ~H>c+VOj17Pa+*GskFJcLFDw9azD{#yoR%KW=Kh&KU#G|=UNw8Z zNXc2u-icA=a~qIDRM=QXF&kIoFmY$L^Ny)?)3iEmHW7`USTMS`1@q0dsq}iQW&L#W z23@=LOgtXes&WrAYkEEIiU>~LL&*$aoMpwcobO$D@4|ceSVjJffBI4W^BDITe4eRz zVm^=d>c!Vw`>=HO_+|7d@=t_0z#ecZ#bnRqd!pz0Lq6AaGOL`7tZc0zVbZnstI7I? zhWg}J*WPhQ*K&&)eVr`JyY7JF>eJsg*Vo-YIv>2Dff~+9K~;+qwcrgP5>Ck-cvcTa zDF$~aL>ne?BZ4ts!IeoD8W@ZostMQN>(D558j+T{i9WSTp+Gmx#9YoSYWUN^{6k0Y z#Y4xw!YmE-O`qMqFaZcFR}gc21kWAHKi7!$!9ylJl{c9KqGtkm`HLAK`@qW_@OTmX zL&tc?_Nn8ty-1Ir+0?wnRk5l$bD$75%PJrGhCXl8X8wG~M>#_|V3x%u38y%Mjkz$QpB+ATWgq}? zIpapJ&$}U;-2fqebBlYyzS3$e++&4XL_twErKI^~E zSnK4uHPk3M*MNwcJ@J_r7|@WLvL*W8WExGTc|4tRtM z0qx>VF1S^wn`@W){rx6UCKTG%zG$^SQg^~(w#GuxLO(?*qM% z#OU2&U(5O@7@q}aOwG8l=kD?QVTMZxGvrP@W`U|FLiVb4t8|B zv|hR5iA=iZL?RH})~f8?%X;_s_9|Pq1p`n=IVBHajYF^zw1{8UF-Q?b%)nV3__3O0 zlaGyLGNX8i{Au(gy5+zm?5Qa;MyqZ1ruyVVi|OHuBw9NIt;IBI^t`3t{&T|uIJp$n zrd2&ytBrlWL9;0+pkT3@ric61*=)AlDag!}6B?jWMSetLu-j5@V_2$#JC~F^gAp^C z7MJ7Yp0NWLVKbmv^(pLMoc<>FF8p4Smm91G!f?leT0#=z%E&!SrKhQnzarhA>k#t+ zft4h+<)oi(SD7m-MaO9`X+)EN_c&jsS8jT)wRM2v{~=fATi22~!HfcV?%c29DFw}g zOlTftT=V$*`yZg^eAy3PNM@7I{{UF?@BjS$WS0DZ_D~UuPdAg7nD5*tzzxNFQuMA4PH$1Od{_U9HZ{wfgOtY%$L5Y8u;FOy20UOalX zIo(IHL6>uRJ%Y_Xo&ozy8%&uU7k{HisKN>gM!1w>vUh%q&0N)HAAHGt(^lz`m( z2fB*9`wM#Y13=?=hmKHTs+^9Kx6tzC9b5@w2aYS-iML2+;qxcVni#ay2-h+883lz! 
zT&d`(b93S=4|+6~G9C{JX&1Tmbhu`g6Zl?8^muscK=_8C(UI)DBT$O7@C^;d;)5EkhFYrCYKP+SA^eB@{N>Mdbc~AWYseANXlKVWD-_Ew zbau=alZ$k_WVEC6!cIkIdnD2$9s9ksI}+JGOVPR`66uz5RLqY%m^q%IU*z6_IbNZ) zo#6ydOLt$Sg-qvq`bF|@oG*{f<9wO9;s92Kp)@9+aC}l%)7-H5h6bZ&W+1)Tq%~w4 z>+2p|JpB+dvU9)VS8+>AEjHuaM)Waxb=k?9NDcZAYt) zufHuQ=GU-up<2i$Te)Nj***Ul;r|J?tH{x*`OxC>68jWIOs;|qW+ z-BUjm#HZG3r`6L>u@8&}{MUDOVVdFNW=zp)%c#``X6GR7>k%c1;k4xKVzgXjVR#DE zQJk?1ACWpgQN>3+W<2fD9Yp2yNu-QE4;dgm({;n6B@u#WzrCl=~ zXlU#{A8^>`bDoa-{hOLjq<5gNyTfdnSu^$aXe>3{)BVaS&#G6tyJseAYQEGJ=+o;E z(xpn)V}!dgLL;;Dj4lgRtys}49uvQ)u;$7<15Y-)yEN?g?-`!A7eP0l-@k9(^c(#C z8vX@wb#-er)`pT*3$IuIBW)r6Y>| zJ8Bb|OrrLVentPG1oJY1Ix zKfi&5^1*D*jDGMl>VO}AI@YV!sJQj$-n_MQ<^2x232l1jP=B(fHEJV$(x6r~BOCvB zs7bYhk1=ZlXWCV(8YjX*$V4spg*fa+vPztUT1tPCeg65Zve9N8^fsW*(STw|_$4zk zks)sCMS_8kphx}w>B$v7dy|HnJ%0ZD@k<>XsXZl^DF~WIEl7`zMfx?>a(?NN3ckEVZ%So|q;*MYe(R3!!AkJ3VdhUu&~DcnnS^&tbQ% z0eq>|>L%wh^~qE7w_j*z#Gi%>+h^R@(9kvB?Wr~x*xG!eX=^Z0=P;Xx!&8tr(}2+2 zE7y2dz1-acdJFIAEOdFIn7TpqDXeFs{AnhYwSZ?*EGrozEYcLM)%<8A}jKl?hn-oz!q;TXQz&oBo6&BK{LN~`7RU?vkx%d4e~Yy~7k|BgVif!sZ9 zF#Yv4s;HKct>91FJ36<5Q_xRaTG1wjtT(FF%?z#Gh>_ljk>VNOzOdFC^4S`km~dLs?bEY3_9=8GKN-nl`m60FY?z3&+o%J$@;} zkvbdYQW-IsO^b`FX%R(7A*r2t_QUi&d07VO$J+jR)xk_gvSeA7EQI!umw)$=#f;2h zuDAv~gh7L2a$BsdUw14cBy(!oHNN}-gBRsB)!hD!&d11hmZE>rb{nV+WY zgcDea)_x|PU#io=9lR5DB_*Tq`)uPT+0jliM4?$t9A+dM0had*N4qVHdI^sfE=3p- zFDhh`fL84iN+ekV$m2CFsN>i8L7}1Map~QSwOVWt zlaSBw8o^#T0`J2#>?$-xXKYBXM1Q908j~hvnSW0rp0wK>JBDgDH7{ajem5w3FF0ve{``&G=Y^wQ%tbK| zARDp{jyyR}POUvQ`HtuN4Z z_tUB8ue~Kp=k}^q`-z)=pJ5%$|HOR)D3DX=NAo%e?!=<(sS0DlNzAV)gZK<#;LMNh z)aY~cWB0%N-E8)e&Cf4yJM#23W&8!?`?L?9-d8zE3=yx{?^EOhn~gSMluYLJ^BAQJ zXNaBeRquUv`t$VH?tkx{Z1(t~=jRRFn_XH~HjmipA44_2joO4k<2Czzilnt#`bFG( zI_`ZF*3wzx%g2RS^@AC3fl}})uL(F&FhB}=9buxlGqO1PErF@ozCepYp~!~NF>D5b zKr)jd&Wu|E!7%mFTNL#HIomKrtybiAQa37q|Iq+7@a)2UexI9A$8sy(KL7rOoYl0O zgZ6kB&kBrZ7Ut?^eCQkV?V51@B8=IUD2zyncNry<9Z_CEkz+(wCuc>ry1Kf-)4;n} z7V)4^sv3RIT&@ir9HJW=Bw9|_7UV%vk3YucanJoYpFLVmw~=Sd5e=Tz^5Y*bxGfk6 
z@yG|XnG*~IZeK*bPJe^XCr!-$4q{wP7Q3JE4v$yDB#PDvo6KrVRhmLF*YZM z>^WIkiP%E)q(_I==t6Q53f=-g`9n_m2YT1pvv>3Ou&OEEE{fVk{)V3PGAYU3K;0k^ ziECZ3+y92fn+^+gBDb7E*#mWhP%oCc6&xGA3L`wijBp4eWO9LmOoisgiKjE;iZeWH z(i)DB)>#-WkJ-_ov-fbzC@zPbtf~@v4UV$A_r5OZl_;HJfi`le2Z@9 z+W4DF=9Vl0Esw{53f3}MO@YE#K7>2~h8GvcpVy`^lfv%jIa=X0EDjhYMNvEOG>+3+ zDB;r`I54oo>-D2P(5bSICIUU%yuJ_#I_&d;-O7YoZ5D~8Nq21cM9N&Nu-HO=^Xz!t zT-3~&jfUy5*!8XSw@FPW+{&m#l%##80TEji2(&b_*}SyP+N{&~q#8Z?S#)T)4H%fx@Ej){fq7eoKu+(d=@K#2T;hdD;|8)d`!)G+bj%Y0Nd^$|}x7 zY?UEZ@-wVcd8wgfA=4XWVx7R5|9@Pn;=5g5E#H9y?krx3t-W*h>_fG+LmI7`5IAtA)YcvpNhKbwfCOr1TCKGznzoQ@Q4TuDy~UA0 z#`sxcu_0w|?ET%iFT?$qTzuSr!T3!v(}_$XJ{TOH_lEP13jsazA)nHsdt zjQYGx9SKA6JN-1sD6>VMXL2KD6fzW3*c?E*1 zDuKXjGWmsqs;`S{tLp}JT1)v~A6m%gc0F{ziib*dx4tS+MLr_G*!Wyarn57HF6y*# z7AdxME290Ig)QzO2f=Ub z?c0PGoOY%G^_Z6FKSaHDRSvUXF#wkuv!jOwic*a+-ac5yd-y|;n>Uvn#{S7C*m*IS z37(cO=VJ~azQ!&6{Bea0NW@rX1Ou=5a3_jnMj?8Q8_6c5LqT@|3Ppa;CnQNE!c*qo ziRE(yITm9?E@ZlE5TcZ$A`Q8wRZ<>@bCJU>m5an~l|CSmR`W{v8fkUhIB;V?ZmtrG z10L8}!6_ywdhi&erm5lf&8RYmXt64SH?DU1`z)45gR#j{H`}1m?Si~17kCZTE_t$jPfk9*xK(A!9Tb`IS1KNF-6KG=mZ9 zJzb5$6oDU#9LIFxM(4bE&O3Y)TvU3U#KY7d=?!pJ-84+S7nxhj>jz*q28 zhvr1<=NL5FH^=vc+jqQLreD%Q-y9u44HmpeE{)h$&0teV%_3t!BoN4}HA8W-tz)si zy81XWKkl4Q-#q*!IH?tKfGsK%&rwQy_-cR3R^`6z&pMl~q?L zod%<}KSKY$cpF&%`AvK-cwRfR2grg&Uvx;R&+P*H&gBF1U--g^mQ+1L^yK6PB7sYc zA?Zp95}jTo94@a~h;}hLuL%Bvf@3T&Xp~z7OoV%CP2Mra=dQZ``c?E-FC7eatKEM~I0$kX0%K*6Lt$6*3TCL zMeom%_<&L*O2)r^D zp_EX$rG2ZcRufa9*c+W~HaX5U)z;oMr+r&Ze5%f5j)cR9Mwx-M)h70Lc-r>YCfeYC zpBbKddo0#qHJV53nvV2)7CzD0xj;-KRSI!Ih^ISwjqruQ6T(Z_L8f*-FaI6E&(!R9 ziK&zR@kb9oNB^{)Sl*$3+)B*9c;snfZ6z;|)n~SyB`4^o&R&0(K74ljS#*R=pF#Ey zDfuqV>~72FQxK)t`=jx#*2Bq z94Ci$0*uQK;_&zymiuij21}z+>lX=xH=rTbyriqsYU^~mX7i5il#2y=flwwA0j^GJ zi}Y)a#ykTRVv2;k%u`U0mk#H1Ig|+?#F3Xz^{2>_yq~<5-gFy2L8W*Kj;HVm0M^L) z)U6LbS-)vmqTdsn;mYR@1i)_W3h|2i~{?hl3fwK_dDu)!OP`_~(E>^oY+v+I3$W8x!P z!>d=mk$Y+KJ34*tg$Eq#x;LnIAoEYL!l>QnzvcGU>Ou4_pmJch0>#=|}9}lk_6q-%ugKMGlBC z?*r5$+slcXBP(2s__A`)Vm4;Smz+nGXi`UR 
z(lGfL5>!J#1ui%-xMBLp9&%0YTg15VTqe`km&u%4NV~Gc*t{C$PJWkbRWtb>`S*6Z z`dPx%!>(BIAnIW$IKV!jY6%MCS7tKHY&IvsQ@@6MjV5p@4x7QZ1kXoJIrPawG@769 zQB$PCT~T{jq_c73^s)8inyn~pdSH2Kxkn#Sls}rS4r+WW8kgP=IIOJRVBa*&*J)r1 zf@_S1E^q%jyUj|GYe(q))GGPbZ*_IS=_&DPul>|Pl_~8al^RC!n|tM*H%FWLWD;4e z1p)n9=)MV_l#>gr!iQ&ulMxR$+q+ zN>#xzDnYXIfr~#vz|bGj84UAVL(7f?gT8?_^hWwoK0S?m=ytlU?X8{fbS0&C%9dp^ z2kX~=y&;t*TFmP)P}nP=uxiYU&1)JDTSQ_JxDvQ0{*8V=esoDTyX0t`w0}gyv7Ljf zlKzak2_$pm?|;v&0%R8MgoGFFPK>&6CnhM)dThbK^LLu{b2{fcGW+7i*+(Ko@p~Gj zX>|O(_t<2qZ+;WWQwS6iMj~=gV?;uX2m^qFA)jD^C8v&(;4kUJr0sk3FBc*?zx@Jr zIJcVmGxd$!HtJ@kB<3)#$4$6j$2caghdKgBCUgCBr1JOl5z_U+C%bTY8=J9JOHoY5 z7AkL%vW4q6;QG7pd;y-Hp8#_yB3BY*%x1~P3#66ia*Wg=%>I_#R_tbOEv{OFtM0*5 zKYx`ISMA7>W|~he8PBW2#?W3|wVM8U{#o$Lrwh4Npsf6P!ii>!7#Y=&r7wuZ!m)>8 zNBtsq+XceOavbeils6;M9-Y@5hU%D!ijO!OrYXcCl}g=R6W-CrNmFZC%o*xFqEcw4 zC%jAQ>Q;E;11hQXuu^5;IxW7tYueGq^l}@rd@-&~^sl))FdJk~GhT1zZxOjqs^7I@ zbvI={%9h24jYbPW@?>9KX!>@K-%T;dE%VB9Utw~@$gcn(vAHV_Dojc5l#Zm@jyJ46 z-`JQTPeH2$Dk<{NU(NTeE@_KFCChV;#8rGkfS{Z(4ht#5SeHn_)iq&VV)+!JiIJkqpek-cNbOwU4ZS3sa=yG|r8da+oof`rJW=B#cmhj6e%}SBAn$*m& z1zM{`B9+pxFdf^~nOYrcx|2R4f)Qyl2GQUeFeg^Iu|_Ia815BRA=ci0Y9?8}{K=L~ zZ+}b6lS}BY4IODtF7tA^2Az39DtcX-s1r+k_4D+S>iZlP&zA;kceT>L>%TKS?Lg4s z)oYDr>RvLl<+^}B9`XCPwb4i0Zb;bDT4S#-hQxiUWG*bd{oDbp+G35RvKre>br z9)?ltuqhkt3cspz1JKF+Ls{)HImsj0eS)omuk~ep)s9zn5&je(hQDbiLhBh@V??2e!>NJ*?wj}HC zo0G~Q8XCSY=rIoZBb%D)r`?rqT5flFttKcoZL3^eMsrMDSt%CDTAYR!>#F!7uc=O@ zYje97fXeGo&G0l%x~Opa3RM`e)a0a{%04VTl%)^UA`M3GCk zA1CB$*XpQePD^jd(tAG4ZD;c5CUOF+m}{wtS`N;5bpkltB%eu9Q3uX}Bto4&N-46Y zwAnc=P*mL!8nRh4<}8U(&*fUV+jDJ|Qhr(aloai0^a^*&zZA$CQONH@V?!RX6P<{O%OlEQfje8fjWf~gJ4K~cu>vgoc z+-EXPtDau7BhXZjlKC#`x)qP7o4cCplV4sSzosv<#v5*~BAZ|9>>A>{g`Vw;U;T5Czd+EHoy>SYdCdH_3(f=_LoV*$?LJM=>OUYd&l>4n?Sq z=aR{WR9)R8D>*T^*%4=lti7u?7Q1Uv{W5R3Tcc3j+11|m)}Ae|c6H4Y;ptD(G&cp> zM#aXJPrr)RM4clCKA>;ilFh!}vBl@Df&DSu6kf0&nOSv_$nDf(=zWD8ZWb70j?G>x zzEiresqxm@`NtCRR)zySjT}HVW~|3EnaDJgvsT!p%Nc(-VrH@y8o*y$(xn=q2#sO& 
zQ9aYC(UYY``5K~|a+cxMh~)A(<82yC%X6!kCp>{CC@`!4{t2s?Cv;@^c!Ca3h#8PP z==R|W`Mip6=YBYQ<>e>%c2DIVhWY_YW4g@`BL)*K>BKBp16x_7w1cETOqXSYf{_~d zx-4QiY_uz%-IG6pd_cD~c5d+cl9Hgh!ET=wi_fw7o2wPoW6w#XnuRTi{Zrd_C)!tA zRT_iAys)|cU{h+u=BgKzmr?tPR4UK-EmL@}D8yoG)vZFKph~P!>1Ic2Z|azLx@%9} ztlPZsya4&{(46MO13#MmU|Y|%ZkzMd7xY^5{CeAXt) zr=pX`9ZuVto+RoU-Cl=%QQg`HqS2bVMB=`si%-SdYRSJ}dxF`|z4T^oC-(DyKN=r> z+kqK3x@~r5G@TG}4)?_=nam@@E1zUWQ{VD(Gin7e8qO(<#^tfC>P#+Tt1TDRt{ETA zl7)BIrJKl~uRY0N%w|Ac+j-}?4zj%m8HLo!&jdhi;ZtirLxdN<--O>o2gL86n!^6R z>9fW8--G-?I}5<$Ls(lea)jaE$rDe3ADbDnXZ>odo?+%}p}z~jyF4JuQ=7?ev-EfS zH?ZZt8~RC*vzn=NTQL92$z%qH;v-*o#JxU=QMWmIL_?ncJ}|6s)5tm-Y$0i6S8D26s5SVdRc5Q}P$S@$gV0)e^OG(8%f<8lS1A_*ZPVVSJ*@k$vVHUjd@%2g7f z6|s^EfebEqlC4w=)hSEuJgbp$rIGgums2hmNPRjaF>1|aKuh5BwGvs_h-}GnG1a`M zW}c_xS}Q8Ybt1c5?Lh+~;`aI$G$;1-H1BF~^xBDYz?ItGP`A_<>eI-Tq_RTbR-=O_ z_ru&DtK?-ONx)-YTp!>5o7~$@^D&Yf6RRXEj?Pycw54EZO=sJCzK@7@#EdW4loAn<6O z5Ua5G; z2=&vtc;ddrzb^fGW<;-6$zOb>R<`|P{ZbRvv&UlQdRMMEaB{|Co$Bo&9w49nZ zYTG+GQ^{&2;vc6LVDbP3P%CFtR909N8$Hdy_j#D0Kx0Out5q8eXBwNH-bDX9sqWv1 zq;`|pxqHr+sjYU4bz$mDvL=f)Ehm;vo2}E_WSbkQSr$Cc7w~UwSmpC&s--?n;JWUm zcSoVOV*3`PQgclVdU46S^qu+2MkYga(iSPNoQcI^#nsGLo+A_!tC?S%cZJ!g{~8_C zsLF=R7z7}Hj!RznzAeEYDT|PIP>mXOn*p-K0S)EN3C8zx9R&R=T#|zgf=P& zsx=`Y<(%fWwVB2zB$r9JRs6t^*<2&SW*7%TeOuga?=hVQ#RHB8xm?NYPBqTpLpX!b zyVIOJ+2PDxNpj$51$3|RW5{qS&gH5%oTn>`^*PicLLuuUpx34yFy(aaN@U&v1F=+A z`2^ylTn<-IE(u$zWbke9kVKbz9YEV$iKx<0EdeUMrGCV&4M_DCJ~AD7s!RWj4Vz5} z-wH@)yMJ?sYd91d4aBdBk;QKK|vy5Q0{rNXR(nDtGN))0>QdIXH0nL<%L0vK=%iXUrQU$G)AV`8si{vBuVG3wLY zn;b5utQ5ISN{*vu$Y>5pDn(q1^Qu%R^6PbNt`KnPQZk{0Fw#n>Ta%1OAa%&6XwUnk}shUYr-0 z7Yr>72WQxWeE=q6R>7h|)Qs8RwV3G<>@PfL&}13m&1kd#%Z;we((geF^%Ai_>eT5v zJuMr(Do0g$1=vV=xkmW7&O~t$*((=^Efr#Zl?eL@SK=z_2$cJq=yUN=gBOlp8Me-~ zv>~vxE;K(FyC%wPoukWy#QAuNl<}(^>S`aeJDV|P#@onLP)($sO`=JL2eQ1Qc=;L4 zBtJnaNuh;jgGbwFYg+AASgOh^31z5$yjlSi6mBJ7=+mR?XQh}|%FEIpV`lGsH8^Mr zr6rJ}>1ON9m~U|{mUZN%QG1)woKy#+q@uh6?bdx7pf{tk7P-OiFspkII_S0VZKYUS 
zjydN=44Ng3B1aZXrppOd#qKQX$o#LNttdq~s-Hj}9dX#~YdZ&ac)WhpPqp~0)50@u zPbXU*Ti3ch+OyWG(V6tdk!0OIIUwsFSvTvMS+k#-ow>&0@{;c6<};(DtbR>6wb-Q5 zTg_kc`OHS|-l@%p2kB3z-_z2u+3A9PKuR9K*au;W#^Etw^|^vd#f}mud^|s+xFP}! z4U5kTW$MW}dpsVO-R0UgW9YWH4}{^ERkQttbh@>pwdJ)NhL0u^&6K$EabYO|(ErRE z*|25%K7=g|x;rEqK&{W}?gpM8Q=~S_?eW|&9A6WUuZ{UyQA&t0_JR_Qf_lTCgsT%R z!rI8VrJA)i{lYc9$mDBV})b60#wqz+f{}5oh zFRjF^0jSKUeu-`ZRapBNM6g21&+FIZJ5L3JOFVGBD0zDhADeNcqmvQx%VGJTPv9T9 z^@NXm&HoIBI3h#y3H%x*6cJIiK%;#bJ?4eXURnw2HWA&3nMp#z+5IS;;E=b!d_4D6 z!c}nAQqyv`jbyVs$y^_O9W$05YCq>q-o?DV#PHa>$SomqSwbam?>l}A8#bTT_^iC!R4n?Co`%`P9(uU1Vn_g;f8-4V)&*%oU1zHPi4e$v{}DHh!& zk)S5=!FfDHT)Qwjrg|un_x4|dFTz+-rcX!~7BY4F)7;iT2sd=FwEcd^55tEan9bNnnIqGx?Y7n`X=W2E4LZ_X|ZL zzmd6I$hW0fN_xD>+XKFC0HEBz~wHftR293Je$hnvqLmClP^3%`$!1)-R zn|w_6P2S_rSc3)Cn5CcbED1S$yH+{OPIjd@lfodZQq7sxI$Nhx&OaQfiNZ0tIMuYw zp>YZY!r=1y+T~&7UEH_3T8?ryxdmFgMxz;!-wTw&)vd$PLsNzx?Q?f-^0@s@o4VNn zo=%$u!t;VE;gZwIREGKqxutQgPF9VubiLhMyFM15kG%Mht;>H%zk}+a^_<&39poGyTg|=nZC8f|St43ZhwIy}PBfx2YPFqzd`{sK zp?Xg!aw(ouI?*Uef^rA?kwlOqn*15jj2 zYF2in|F#6Dq(9U0)UwR(dd64dp3>3&;+jPl>0kBT8qa-LciYJFrW8p=NM~^4YlwWi z?e=Ajjkou1e7m=ARs-kt#t8jOBCpLLi)HY$e{i$B$H4RA(9xHctjzN!o;)Gp6%B@6 zU)Gf{X|400VFPZ(>1mgFhC=a0?-Lv;a=)1V#y2^~81@WbAMn5A)m2s*x_H4kfOP%Z z#6p|Zt=9-`)iRSnuGj`;+pk4LNFYM>Yfz^RNC2pePE$@51L}bmlgQIUg{N4Is4jD? 
zsNYhrt=JgJ{p#o8ZWJn_Sg|{7j4OodigK-3nX9vML6EElQ}%izfW!EXG%$C8jq(Bn%QHd&$)q3~e9VOtQX zU+l1Zm$xelOOI;1xS;ran$e9*|*{x8_alPr6@WjQv%sg>IV zkX_+sOdE7=V64y5W$WjzldgTgL?23X^+pg&D)Y661x|6+?1>nasO zp-q|Y1bGKcu@>HK9KX#zF9Otdvk5xWoH|#Fw3^h@Q@FMNg55mE?q-LsOi;?>x7O)` zK#=nJL%kZU)@e4+3dM&F2BWm>3wQ^T9fC$1VNc zH`df-DpRMsy01a7qO__C6@#XMQ13b*jsbpb9S5BVGEc5huKz}R$B^G0J~F5AKzH&< z5?r%phfjT6YwBofI=`Z=_k`bM?$TxIRs_?_0S}EC_{Lxi01p%>k(BBo`qk{;4^W2Z=?!W!M4rDZ zhv?{~Qz*sdz-RFR_b$#)p~EdLxeeD9Jb>fIG_#PCV*umn8CsiaUd9- zVYR?CLDK(>XsFlfj)=<3ckp<89gGlzA*NK?P}j-fKmz6aBmhWFq9`rrj~!-eFl5r8 z(GXXn>w$pzi~xNol`(@MAeCv62|{&yLL+XpG|95SaF08)6U=yl}q0dk05)hQuW^ruoW z{WnqyM;e1B`#$x%-6_CT?epYUb3( zBKe;Qdwp3R~UqW9Sx7%iQQNo&fe<&3(YLtetLuXbsa(16-SUt9@an%{l z?$r%v=*QdJ^;+#MCr|$5NP8kc2g*o536snQQTsDDIrHp8z&7nm560^zRwAq(6 zyPI|Tgi)*)iJhCyoEiHnms_!y%VoSG5_AmybLl6L!amRr_#avZ%bZ0hnFUTy9ZR#q9r`jAptW3<%E73w}wRi$2R@G0aqD1uVR)o7+zN`HDD z<{k_wq}C_|0tIaCv{@!WX_Q1M5GnxAlt&gA<9^7!So$4|$TBgaM4@7d<+j#I?H@Tm zrT_RG{iidpy>@T|QHNn8hI}YHD%C}+Wh$9ez(3C)W0_~{QwY9EwIeolWUVE)t?Gp zWU})OI&B|(R-bSWQhzB;=CKk%T-ZS;VxymAwbpAwb9tiO*EOt6PW7C{?BOHk9^@W? zoMmKaUJgcNi9ABb($&w>o`-4cb8Av(48})@lKvg{z@?2ud7M1fH$YFr4Bo}n?YLS4 zlnSum$%vriM*#B43LuDlWTK#P0Oh6-W@C@TyX5uZM;l@dHiL0m*wvxcn1<4+`+Dd_ z&k}iX2C8MeR_jb3{kLCvXWQ)ouh~4sSzS>; zvnynTDkIYgQaoFt!Dd;tgvpUz^VwgxaqhiLPC}Ypcg6uQA$kiM(j;&jHc-KmJ~y7N za2jkYJEm>6*-V639ggd!H!rZ7Jr&ui{#3)MkuBNgbSoj5rpBi?k9@hQX$skzm~ODy zNn360!LFq}x7Q|G0E#@je&k3b5+CmAd42towXgN`%#763AlZ+~W0p?Enu-{Y<3=_o zEKw-BH7nU=8&?6W&ydmW@Amc1BxHJb&pTV`qZhwE=eAHVOaMVvbNk$fNq=X@TRWS! 
zztz<_NS!40(b%Dx>3se=VI<}o*gq?t1*dBoxzbRe^52|S(Iw)<*jdh;+(Jrtg7lxf zwDCCE!d&qYu4up&<(IDj3g?Hpm#Jf8XI>&7pCrriVe&Ea*&SHxG}am(%_U4SH&`CN z3Q7yB>ioCsou;EiPSlNNOS^e^r`zKq#O$zd9d2A`Gq@|C5e_vp+&gRAv*lN70`xwSoJ&mXY?Mbt7|m_n;|r3e;^PX?&ggQ@b-*bH zj3B6ZpRg+d?-sO8Hk`)!89uAm-kdN0mgHI%bz& zyB{(+m7E0qJZa(Hc53XY2Oi+`oMJwI?6bdceuMjKv2L=Wb%+&Pr3o3wMJTJmFc=Ka zv{+m(ly%kDofsZGQdiqbQQQM#zptBxMl;BKo7K4FP$1}w)P(OGd{v?yN`4Jfn6`G4Kj+@dm}9$2qzI4{6C`wO|E^QCmcNsM_c} z!w6Y)#V;5kFTpzH{)=%gA?=yWi`%xnkjbFa-IW6Q>O@RHAeW9^q6OB);Z-`>`L1WHzWef{CS zw%)__b?wl*4m0a|2ll~-ePB`5q1}-p#_VKFD}=*~nKQM3a9^i?Rx@le*?JnAPEDg{ zWdB+-VzYVuR@V+ARwe~94Gf66sR7kjrE|#N}jvy!;aQr zy?U{qMI>Kv#3;vAa8|T4JQ!~AB4uL|#8|-(nja@;7l}Dt^IY+jUPLjT;~3RIm1#RW zqxa$VdyAizOcbr<8aJ&)y#J|g4DC6LrqQhPis4#>YOt*-F&hwyvJqM6HiUE zR3?;b)KeRxv+RbNYQM+sv~8I_eXG}#kXE`4(mGc-T`dy>uBksU`)0q-YcV@_&YFQd zO|;{7n6zMR1YV~S?h33hY*lbka0656F=Q7Zpkc8S@)Y_1iu>~TsEXy^)7>+ZKmtkF10jTv1ri7| z$z)+CJApvhLS$!10tsY+>?_$>1O*iY1r<>Q1Q8SvaRE`eDu|-E;Q|QaiVK2QFUjP6 zdzs0EtH1Z&AMf*+PdKOR)TyejuI{et?$b@MAbIq{b7+U-Kn*}Vk# z_=*L4Tik;)Kr{=w{+_7Za$63uHwWqY*z=t`J^R@A`_{R1+V$|>PA>QCWo*QV<`%Sw z3@$gC#gX`*b6x;Si$IC@Cg94OA^s&0op1f-b8O0x^98{Y&@ZgnWcBCt0(rjW)t1BL zC2KsLKx6PWUoTjPx8a3hS1B$v-=>5bULq&XTVK`dtta@hm`(K0mJ=O1(6KGu@Xm{P zCkF4JjmhK-K>HK4!{jlnNNs+I*0&V1HJe#=`9saCt2S$tOx0at4?8imFns-i4dwpv zEnPi(0rQ}Gv0pmIW)OA4hZT($cVkg+=(CfL?zUd5lF~z6MOCCCw9k8XSbfHd(>ckh#uD-7Uh7RmmYr+0MgQm+JsL|C$KKsX0C-y zb)XiP&pvVy&bqt^TQvK2j_JaODpKnP_8+kMS1hpT)!WJNNJr4m6zzpv8@hF1R_mgh%ENOS>=JJi^4!!6Bqe zV45j(S#(i5SeX(%0PoGJl5z`NE>D@EbXk0fd0bkt? 
zbt2;fVm*8H#R-#7=C!!d->}(~|KQFYv44d1vo*cFxf6>Ag70?ka80dGCvVfs%1$0nVcfh_3j31|jJcIOCe zwBiEWQ(&*N&d~K0*ocrh_8-7*JqC>rGV}nAK^$AlZMiOZmRS9C+4?sC?OMAllDd6`4K1e?naP$=%PV@qvY@O(U zZy<}t@SM-Jrtsd-5I`-!2GmFejfw4OEYcq5GPd$~0?`hzr*hhKiS@bGvVVr#`dS-x zy5#pupe~|VeJh|?1rMIH)<)Q&&35SQ(l8s;;Ae-9bvdnT7#K!w~AI%hVQ@1!e0TmjN&^b$Z&Lz|zxslL}n^Aziz-y|3LwQX#kc^i&RN5cchqTIh-*J6N zTjlzY(D_%3Y*76zJ9NQSKGX9Sw8Y0C1`1z9jzRp7T0)?08=#>5dJXjO^>44EzP)>l z@LSb3a^VrsbzoXhd;P@p^&H$4LmD=zM82e~8Q<`HiLncznN*S6IYnGE63X)>q4QgK zzN)Ojyx&q#-fli({RnU|rXXm*Cx;!*(H<4{CTgdl3-EdEJX?5q zodmv{pfB61+1gn_|D-jPk4=ESglB2LSii903tQlD*!WCt3$hL4HZEJXd4b%&msk;jq#HTpJRbYePck_v6}-P~J~U8Vlj&@P3lRD}|OM zJRPTa@V4Nvb)ATjhoLPP@DDL^sM<`}9+k9fLow;{kMT3ULU8Ho?B?Fy_Ob5Q=(DKi zAHwR+eLDAv@@iKRdJXR0$J9OewQ{ae5j_j-XFwagj^8cHTl5|RE_;tT8N5{P>{Be) zIrK&7KSq=*d_~ZhjrM?-tc3ISlEdNc1=+DhX-Q~CFfT2%)Vd9Z3ixPL&?vK>5dPJd zfG;vs=mInYq9G?z)^bR|PXoT(Pz87g1>d2;PZPf6WJ4w39TogOoSeb`;apll#~lGW zr}MF(QwutK1-!SyN1F!h+klsFIB8V)A8@#K6m*7jImtaDPvuu%vPlom)4-)idLQ0i zAkv6Xk>>4$S)>gjU4V3?NMk$|&q0gNz2be01R%X1X>XARb43pUF`gqTL3$&eCyO)& zbCF(;^k|WG6X_+8(gM%ffS8FtKaS_v(FbV{kxoXMd&&mxDdYJl`}5u+4ISv#i|3vq zE#z_XLFqsqXwTC60hw)k4acu&Ba;RUwhq?sRecNGF zUR-+43pQTX!H#R-(o5X3AG!46WOM=DB5}KOsIU#8Ti6EIp{5tQJq)Rg98M|)e706g z($L0~Aa{t0#O}2Yvz>plJSgX)%yeL8cuaOcK#23JbZT7Of%!9@j*Xo*EhhT0iQ$DtuaM3i zUC6SLX-Y}v3vwC_ zx=%^Et$&eqz`L{V-dXqWth<7a;8QF3+*y}JWG?DDB6HBcvu?*j4(xXiIqSvqJL~o~ z=#59y27bN=-d}}uyh!_q^iI4VsQ(@LKzj}gpMNtNSa;SjwPzQo1Ouz>^dHoy|N4dp zBOd>_#NK}PPg?N+zkdDv{EBD%ADsH*@c9nb#HIaDr~W|pIDBJ{Z|oqeS>*7}`Y$w~ zzX(f#29RTmfk&S&@bVP(A0-dy>_V=sXYJYx5wuPND^>qGX!|*&d0UbzX_d;PwUZ@*}nU>}fszSc>!Odf#m6EzYx$?Zba$UEN_?H7Mrc~tW? 
zH}STp?|ig`*LVK5%M8dSv3U1=6H zFOkOEmVvxpN-gnzQfkQpEpf>`gqC>ClUlOe7@Y%Yn!mXe5eo=YO@ z3i|!7LK4xk3XMyT!lH4Fr$ZMw3jsEMN$5hI5`gq&krvOL`hW+X!&WR>H^R$R;OYMZ zIPkd6-VX;v^H(lH1fNg90oJcyx-8lYUlxm=${2+ zJHdJ-K22`DMP@4eZw&hszJVtH))Q@av!IR|ps**j{zS$B{wH!7Vc2TNp75PP(SFHU zXU87TXXO;_qlVY**yF9dW^hS1I1jaBC$;`g_(-ReoJ%NL7n<}7w;-Vxr*R595Haa0 z^ip@|2WtrBTl{$gO6*QO@t#cRiDttg;qATTw$08hySa;9wziy1#4KWk9s8smd!lBK zQ=OCTw6j`ok|Z0pQ%5^?)~`->+Wqa=xvl5P6dSgIdl)j`x$nDiY;F(s7CR8_JzDJl z%D1iM4&W`fL#~flpA#`=fNFCybsuPa{B0Gea(_;?xm-)=?Esf;G`A*$vYJ?JKn&{%m-2$=!7d=uUz)$g!M`6@vVOf>*KVaH@soT9{(4& zCy`TOeG)sV^?PA`GN-@Tv6FuIgJW|&3hU$8+`6ij^*QL%;H2IPAahpq7NsDyDtcj2 zN707^UZT5OXjS-yhGl32xF5*j*IVJ8Y5T#ANBXwNI#c(kD7Djob3ZbG`;nZ!9_u@S zzr?y-ME15`18oLsDEx8jV@u3lm)90Wg`_>ZO8M2t;13uNj+YH{z zaCu%7Z3f4`?#yiXD7C*xp2EK%+6>Vv(d27rGhj!YI(ly9?@eg^mCQwHUKiAPn<23$ ze97AkPW!ks{4Iq&9$o?ON4PA{h&DrFkN=&w8A5XBI6HPy>%Ykag*}z`$%=N;RbIwI zFHUn6HZQfcWEt0+qkcr2!Fiz6%4CZnwZwbRQ%>Cb`BZ3Qw$>oSNNH_4@Lbhg+=VwJ#EL&)SVU z=<7k}5a9hpFK1NnM+KbsaxqTF?D!?RYb1f=1K!WD9B_$WqC48kX9a<;ZxFitN$Ijk z=o0vP+2_JXr!Hu=w1-I6xgD6acp7D zqIOAqu1kqOSF0Dgl+U;*HQPi%r*6sQR{dabJUW<6`f^PI=^nHl z7rC^2L5fd>!# zy?`I!_~hw!e7T%b9`5^b`K3G@KhP0h`%2K^_&6UObQHe!rGOs>9DBZ<4o&k*g732xf?ayBg$9b%LQHfeh(x0xk zzW3q-5ByK3hO+^iYil<*f7y0!c!Twt!^z>a^*lIF(I2p#{EBaFy5q~5J5GN6FV0hb zeewKx@_LbN2fiZfAEe`?<#GeQ<&&&`=q7J|`|ZuQ5AbdH7Ff1LMUMQpwIJBfKRs&L zV!6xR|2NL0wFP;GHE5mM=5=|Vu_U>|dW*i++;sHlF}iOv`Djba1Dji>(|wp{K)vCX z@;C34S^uIhHsAB|%g4yY%_R6C>&VU4{^TN`o4`9Kz^5Ddi1mipnE>;hO2uH;(_!5a zaku{Z;-NzhtB=SSaX$BEz5=Ng^;1^_w*|DYgLb4O=j4G8o+P&aoK`Usb`G`O?|0YcSx`*+lqLFOt6Qu3~ z>tSj=@*$~x>}$&QF-p5^W=<{s^z-JwHnW!Bs5=4)qN(v3PA6W_!AYf@maqzT@MZG& ztJddx<2#dmuaYNU`0AL;X12fi*M$q2d-K%I)}@#WAmW@_%&fG@A_?%43L|X)Bc$kp z^_2CU&q&eE=d4AalUNe-skN9sx*1#2zDY;4G;PMhfP|J~6rHo&{~a?Q|3GcWy=2;F z){EA!KPJ<59ky=#6#HZae~hi@FK;F%t@-$zZ}etsKeEl5fx!WkDtb$P@*c(%))KXX>ek(UU97Ss_}1WC?I8SzRaoVSl*2z}PkUkCjSHkd8BQjXW8?$!1MN+t z=s221Yw0aEnC)a=>!#>3benb0=+5bW)jR8b^y@I%8Dm&!_yFHFRXH7Wy6D{Bd5rUP 
z=VzQRbqMLu*x^D)(lM{&?v5XK{K>_`#psgZQt9%E%lEE>UAMX(>!k0rxKnv&=gvzy zS9Lz_7UH(m?R)na_hM{0J=o*>E)iX>be+<5b=R-DHg_xPcBXrD_s!j}_2}ATc8|q9 zj`!@{v!v(7p3n7i>y^{1uGi*XPxLy_>$P5=dNNOU&%T~Po{^r3o)*ta&rP2DJzw&C z%k!pZb8lVmgx;%rZ|(g|?_c`F^qJ9TNuT;Y>-+5Q^Io48FAuM9uSTzjy$*Vv_4>)H zrLSw>K7E7xzT^Fz_X+Rw-aqy0-0x8zSD!GSYM;k_F7z+NiLkc@bR95gK>CfT;oV0+s|k9dIPzWWd>g3jx;xZUk5Zdj|#th6auf z93MC%aA9CxU`61{z@G#EGte-wd|=bS`vyKTaL>TQ1792X?!Zq6ULB+javjubkRNuE z898X&p!I{c4%!u@5Aq1=8#E{=JSaM7V$jT>%%J5#)j?~5HU&K%^i0svpf`fv3%U^W zO>kLoL-4)94+lRL{9N!W!EXhh556>b%;52ZQwJ{?ymWBM;JU%<25%mGWk~9f1w)n& zDH(ET$cZ6mhI}&Q%8;Lj{1u`LnG!NLBs-)yq%P#1kcUEchP)N>VaQh@KZM*Gstt7+ z+GA+{p@WC68@gra6UGk4Zblzturb0IW1M81Y0NM#GafX)VtmW^q46u@_r{y1P}68r zlIc;?9@Amd3DarQ$EM4s>!!a#b)jydeL@3p%+}b@l+g6h$3pjoeiHgKz8;+)RuZ-* z?6I(S!bE3<_MRF;Sn(rQzLRC-iY`# z;zp!CvVUY{1N|7nEhh>Q_Gj_flsZ{&NUd`C?kbfmf8xf8Cnx?oDQZ&Vq>oYtro23P z=H&BJ22PnirD2M7YU$M5)9#u+bo!0dd8sF6&>7=q6wO#YozZb-r;%8^JC{vo1Z>^%lz*aJZSN-tg-yO@UDd)rjJYCk^b+D)Qnvje`b!zeEJ`M zM>5~f{4%q7k#SMY;=YTk7hlO5m~|%m-6cDhCM|to={rlWw_sgG=zcl~u{42}M%a$x_SoYAeUzR&9@3%ZDl9rw^hI${aa8f#;*G@zOWaC&mh>wLE{Q0KDVbO@vm~=* zc}aE2nv#trkCp5#d7lCveBm0T_PrQ~+0p|ne>cj=(g@Y1oR6G~^4T1sG9HcNMJzlo2?5DDSm(%i2 z<(}pK<)-qH<%#91%I`0KwEXGvBjqQ{&z4^(zgB*uyrsgqqH9IJilB;!is*{uinNM_ z6}c5<6^#|^D;}Ta&zSqmHR7?RK8ky zs`6~*$CY1IUaS1M@{dYum7%J0RgWs~s-UWfs_3fZsBgRn=8ZRrgeFs@h() zx9UjMYgK2eKCQZ1J*PUOdRcW@b#3*!>MhmVt9Mr)s6JZ#M)jHM^VMHeU#JHYuQulVYuGYR)4DgT>a(xU+bG2oEy3|_%sY|h-!#yNNJeeu&AM+p}JvB z!^Vau8=h}C-f+6%lZGn|zck!#bZYF{*sn3DF`_ZLabn|LjTw#kjWvz;Ha^_Av+-c# z%Z+a}o^QO=_*3JbO}ZxcCa9QXudhXLsVIFcl_KZ5N&C?F!EVyKxWR{41K+@o?6=>VEXYq6`{EID!A@^&iwGXi` z(rL_5?#KC(&uHr~D)20JEI0&OMx25Zr)|fsIqz$!pf*8!30#)JKiNd;$w+umUc?=v zXhxh8;Dg!e6s#Ig(sqN&F|2Jnh&i4IwB^`OYdgHCY^_FHOsCj8m*Q~_&{zf${M z1RTPY02%i1iP{!j@o7^?k6dWm-Jur!86?WKPt| zpuJ-4Nvw=JialJO)n;NQu?+L_&tPS399NZ)rbX!9oRdgFEDpttzNM9 z6lTDx@e9S=)^6yz6#LoLf#M5z&jT9ViTg-x4X!)2Ea>EE*g+l2a2W7+;%+%sA$^Z> z2+-;=_plSzG#}S$Sa%>SZ#m>mMJ<>N+j|G2CF!Vfy>TB8noB@!BEAM$jyE==ew1L< 
zYBPDAj>Nfyhmd|oSR>Ebeyp?{g74`Ikf&U&2y)B;e={V%U%LX%0a#ys2-wfV_BTS- z7~ohyI}=zA#kTaRJnz7}H`M^WPYK(9cwA^Yqtbuwyr8cKH*>}W0Q z>?o}E7_8!Dw8D8JRRKr}-sJ5DucHg0pM9`ePVX?_J3#9mSkeyI%Tj149`v>etL3fw zdep-_q+WmqdHw$oJmO$I2e75_Mrd&gX7JVsD%{R_t?G^Tawgv3+8>4bwI4Ob1OEe4 z{PFhw2%gVEJAY6-V3pu&&(jd`Vlz*h1CYPeQ^-st2 z-`;b+4u7pTT7P#;LrUI`ofcAXN%)`Edc9qmpYVC--&&OQKmCnD4s3r_(EXFh#ZKrs zOXP#+iT@pjhPj@2P28__X+4i$OY3cv{`W#J{2g8|d9Jxur4CZKAK<*O3duk4(*NVx@kObGujX|&Z{^O=oXAD&K3Nl8R$ zc4}>rJe(KOU>}VM$(z|W%@7rHvN+R&In_y19QU%%y+YO?0)t*JAkw8-ezan z2kc{=ziudIPE&LXb@{qd-AdhA-6y)sx?6gpch`6ICf;Mbo4kMX{@wepeiMCMe7gI1 z`}q0<_!tq@5BC}A6X#RpQ{(fb&n};R{q=nRkp8~?1Nw*bpXW<`4Zd!^U3|Oy_V)Gm z4fY-9yTCWo|D8Wxx3-E=e+_x?LBBK{ZP;vBc_n(K`^k3lEb{Ol+Dql(6!P#X@}SxC z(1<+T%N}4)u!A^({2g`-bXsO-3tCDIAP z(oTTT7x=q|zn`_6Fuem5boihAM-L%|zqWt!89hcn#*?2VoIww468=i?w*`NgEzrS} z(2d65RF1>{*UggHx*57zx-1>?c;`PI*Xe)#(~bWxkPSd!JxkG7PlZ>t1?@@^dejHe zbIeCCz5w;xg7&!)?OqewzlYFb-;LI0o3@C|f=_ZUdeTQ|A^OZ|+H};VRJ5=UV^z`p zS}a+jt)@lV17sfNaaO=aW5{f*tlp|+kVU9>aU=&GOFY_*d1y@@MGw9U-%9S(#-Z&w zjF$a5^zd(>{eKNL_C@rbpP-%p2tD`5=)phNenemVz4n9l2l}STglH{VD=}c>8|>gr zI>N8(Ogu;zj4JkpXXc|N!~ZrDBMHSQV;I&)N0D(PjwFyoGEJLAQne=t#reoq?McG4 zJ;YhtM_gzzEzzDP9kgeNtG1tX(vFbs=waNnL!_&Al=RdN5_jz-;;9`Yy=W;d(_SXM zwO5Ik_6kle{TK1m-Xy--3F57tA_KJ7NiNAF`Py5=UwfMbpznK!1R`=6q@5-MwRcI7 zb{5|Ldt|V74!-02WC$(SULZX%E*hzwCnoJP5`mavg7y^|p5UyH3VyzmR0@cQRS~H<=DU=RYJ3 zUfE3THn|J)-=AV``6t9mPZ7Pgn>cA-kYVtaGRS@lM4C~{g{42 zDoGXnie99bNHx7oza}f_6?&D_&~HdBy+-QjxAZ$wProM(q>=tWf22Rr>-1+FhWac0 zjoeFrr#H!ZdV_2r_o2mlfNVre^dP-OHj^zZh~6gKXft_)woof=MdSS_eC_RwvS7x@ z<4i|(Fg-JnCzum?lI$c;F=y=0ybJAfN9Mv@Str(+xslz>oq4b>fSqR{Y$&-zE|agx6=q~6@;kZ7K4TZyhwKA*dw-CBlRw!<>|^p5`44*+{X-}V zW5?MEa+@@>PuQoVg;?2Z>?HaajS@;JqdI10;p}zx28+N6&X?KO>`iux>ZyVKi@imi zs59-r&e4w4g}uk#XHjez8%|wmCw7rtqMfN5`<#7&p6DI+CHsoHvv?NA5?CS|$CB81 z>Os4()9ehJ&lb?Gv>UUqh3pEuO1rak_6^Hmnc6KfRl7k_w3}p#_7|Cf);616qb7LL zxmq3#g?GISp8N*HD48@&OJ}dr2pX+bA0@3#2Xt6Wr@=q?fcLOE-e2=JPTmuY`X5sF 
z6#LP?qwX1ct54Ov4(ltrsC&KEn?$L5tciwHl8%!$o|LHj4w@@Dq3&Ib{dUhZ@vfQzSC)IuDHre3wyVA3WrFfpp;pt_g{mVjY=z}QA zsF@HS_@Ir;f=B6twlW{BVm_YcBQ+8K=ZU+?crPDqXeRtI{$@1VQXbjl173*qKtZtx z915Wc3{Zndw!kO`y%4;W56yFEK7NZ)4w=}8lT$Ck8$$(k8{b?3&&SvBe=p5cyt^3u zbI^hpAi@bjF8FT*+T0ZU5^?AFrY#pk0CjxR5yvsF?XhOy9iGQ*A(f8b~YX8n)Jik*Xp`r}9O>qm5AOoKc&drbm$i+XdCpu{znV;gU4U4neT z#ll~^e+(WHYK5QX4}U)pmO2RI3w#V}2&5Y-YCtGT)r@)&0gpcl-t%x+&`1$U#G@}y zMBkkx`l)2ph)J+8K0Yv2cm}D^MVfXOqOe);Hs-*S=nBugJ6egJSS{d*2(1ss7k<-j zXuoTZz#F)!-9q2K0baoYL}Wi`*U_I*tR!Tb6MT^-_$N=HC)1(7dIdX8JfZo*hj2q% z)EDC+d$rdQQ^jcR!iVUHDB}nsnK#fL9Y(K=v(q%R0N8zFEIf||+N)o#2nSVe1}uv@vg?@9%{_q_Z!9NH@{JvQGkPIY);5*z0-{Dy@ z7^93KWTc=@O$AQUW9K{iBYLbqzX2_2cGTI+9#x%@EEU_)M2z2UI}R;E6FOdnyevr!{=Fx z@z{GXa<`uQB-&$bUJ3U}$V22|vXyMpwvtDrH-boU2YEvJA7mHVP4-}1cQ4sTp221W z&mxLENS?F#9@6u$`5oj{a-5tXuaT4Fb@B#zlbj;|B5#qm$vfmUIU{`z>1~h?$$9b- z`51>Deo8(g7s%)23-TrTid?jLAml3f2IJ1(lJCg(r7M)G!(7ALT#aa_;p$lm`&7hfd5nYUx1KD&5T}pFkF4k1#(`9ryEue+8i2L-k zRNF_(XgRH*m9&ah!*8y^N`pFDPa9|>ZNjdBtLSRFhTcur(slG6dM{m1H_-d&{qzC4 zk#3?7(#>=WeTY6xx6*C&5&9^7jBclo(;f5)`Xt>+pQ5|yZn}p)P507$^clLJK8twp zAbpNLPY=<<^ay={9;G-+n!bb)hF9pT^f*01U!y1K>+}u!COw5whqn;3ze7*cGxS}0 zmcB>dr|0Mg^h0`Hdf$lWKcg4u=kyDVP6#iY`{3LIr{BtO`B~#Hbyuo#s_fd88a{^c+~B@Y3WO|uB;pD&V6Rq3;uF%*2m^4GasC! 
z=Zn!1KjzN@SRfn72C*Rc>x1Dt!(-;2vhaktr^~%u;l;8MY$O}SMzb+&EQ@9_EEc|N ze4F>mCa`2SkxgPLY%-g|rm|^lI!k3USQ@*F&1AFKY&M6@W%IaqD?HnD?$NSEY%$AX z*=z}0%5qpP%VYU$8C%W@SRpH7#jJ#tvNBfADp)0}V%2N~t6{aQj@7dU*2tRJO16rv zW@|9gvlind_h6)CJ=?(U!>G>#Y$Mym9%P%@7WNQ(m~Ca-*dy#w_88la5uqIzPk9on zRi0wI*lxCmJ{T8g@R&fv z0Py{}-!D9R?z!_Ag8TjFFw*oP;t#|YJdO}C2lxEp_47zT#0~K5;niPb-?H!6_v{DP zrHczPGxKsRc^TR1`oi?AQj0DpyTD>tmRXotWHIDs=M@)a8VWPh^Ybzc(F+SQOER6J zb1mrw`FT#!`HS=OGM9FYF38SXY)J>AQ*^wzy2Peu7o->GF3QO)GsLA^fQ?H*zNN?z zFK8Iz1+Zg$Mt+ecJv}q8$SGb@W$}57o#G{Bm&7(Qu8HaSxw#gJtQ)t`Qs6SK?WrM2 zik75AOOgZ(Nk!Q?8JRBQ+b~@xIM6X93vms}LK@d(hex_(h-R21pHGs{CpkPfqzI1s zl&t)MyiO@u#ZW;(ac+*KxX39*O5u`Hm}4o-k|d_8i=;Hwk+0 zM#X|raXUuED!aw1yo*J77t8W4m1IlX$aW~Tmv^Zw@A9^XhH_cn72>sy750*G#@)iq zocvN32cc=wc$&RLpOc@rxX`IMFWVRz7Ada@VI2e^ATR77Xo))^6Tj1C<>xOIsA0wg zeNld1eqkq@SR&QIl9ykUnUk4q(I;9^#r4y(c+-Kd=EN{%okN0^d7 zOz{a*a)l{=s(j61ieH%G6Q=ltDL!F}Pne=_R&>pZo>|c|EBVceo>|FnR&>pZu36DF zE4pSSzgf`_SM);SoDoXS2t_|a z(T`B{Bb1yGO3nx+XN2M#spO1Q{2~>KT72* zO64m`@rzRYq7=U<#V<8 z_|XbKTJeomdW}~6qZR)cg&(8vW7P8)+jFIt7{xb6@r_maj8*T)D*CbN{Wx`vSMtRx z`4SYp1gYn6qkJB&>T|eJ(hXPjI@~Db3OCApgc~KFaHFIjZj^F{8ztRvqm8bVH{2-Y z4L3?Xha07y!%c!OcG$FCg?vbha>P~0fvdUS1O9B~f0+$xD*FjF*=Q@{%ks6Xj)+yrjs>6nU8{FVo~@y1b;y%M5u* zlb5^XWv0B$5|>UQKQ=AM+}Shh441mF5UxW9yDQcq!%~=S$uG;cNL;&4UDC@7vU75> z(~GjxU2XTAu3@rZq@NIC$tiM6%q+-7Ft9MEP(Ve~XpEL7Y*Hp}QYLN+l~o4|qFwSV z%km403i6j_Wr}w~Rd*I@lGP*BB&$cLNmdV2M5rrwBXDhFWQquLkzs;>ImZ`eiJK1G z9Ts;k(V}S8y-caLr}8TC-)T-(&;!h;*me^vn!s$s$rN@ea&n zU?bq}@pmxm9N&(COOnHTNe)6L-T7Wp`}Zcb$?THiKqJM0M#>#DTvFOz>YUP^UaA9) zR0kTVchKmZ+Wx(nZMd!~*pmv80id{dRneb(5Zm^KyF=`!9tyu!-pgQ6Qcw|}d?4dO zaqrd^6)HsO?kdDKYVHp4p?sJqs*AYya10m)#N8oal;|=%6!)$wbW{)8=(ws_Qao@= zvb)J5?d}j+3V?@WcqwVg7*jrVj4;*Hw(NA^A*Y1OxKqC47*Gk7W!h?mZkMQ1!Hps*4Os1>Akw9oEumTDwXz&EYi}u}X3>3KjRRDo#}o+H~7d z1grwwrCnsJUUMjScZWDuJau=7T?L@?%=T6+16lFNtu2}rX_r}TLU)Yj(S(RZT;pXJ zAQEVOB(y^k?>Kne0%d3Okq?r>^iVcg+%7!Vnk#T%n3ysB$W$gJd*^$)3&}rs#($=QK>wGppxjMaQh1Q?tSkSMYEJ 
z54XXUa~!Um*Kp;uhAX+l<2z)Qr6YJnk1RB3N|2mFr8{hjmlEJAa%75^?j){)qbb1_ zMkzPftlUv^xRN+rX(m#oBbB6)O0q~L*)S#9FqQLYX5eBvtdiK~zUSCLO#MLtdOF`_(iRs3TV{}{zzhIx3d_{S*zF^azo`|w=x zk5T+%6n~q&%kU3)ihqpaFYV0~FYOIi#b1VrNUQwED*myGzYH7kT;)Gj@sCyfV-^2c zmH$|k|5(Le+OsKM+B2>y|I(h3R{59qjI`n}?HOs6e;KYKt@um(Mq2Tgc8#>+FYOv> zm46xLn&M@ci>u1Nv~#3Y{-vEGt@1DJ9BGw*Y3E3*{7XAWTIFBbInpZs(#}os(#~;J z`ImN$w90RS;+LTEEA1T5MSeq#wsvHg;A=Dw6Ztlpr5)p{>Wj2Pq-A|DOFJ`~rJdm_ z_~0t|7|pW$a8>nG%7OZ%o)1&_!&F+@72a3%SK6P^9Hsb0DLN5CzmU^*Rq_oJdI4O$ zAEojWrSc=~3-Tzqv?rw1d(u9Uw&g>$ld@hxZiOc!X=At?RtzJt8Y!l{5S7L6p5i_Qbiy78cm^FP8^Q=!`-n0&Lt-CO5 z%I8~mW9IcK%%Rp}26eBPL%k0(sC)+YdCbLPbuH#p+D-O41aZh05$Z{EY| z*>tf+A_waue#AsV`3jkPwvV@=B+SP$BO6`=-gC2_`@ln1aLr8m}QcwueHPOK|2 zW6dj8k6>l%a57FiPR3*9$62iVn2Z%D)3AE>JXU+m)IPyX`U;!}Q;T(|pToikR?Sez w*G>HIr_oXjC~?EfF0$lcbqu}%z%wQ+u`|8}&?w)%0ju?3nb^{jRg4g$b|GS^xnX|LA)938Wnb}#Tl~Ro% zex(NI<_((De%e<`nZ8QtUj~mFId;yaJ2xrS_lQyteKvUP_?%mgJsGc5gQ-e|{XTMR z_w?&KPPhiQTk&Q3*m2q8cC}ewpX)VT=gwL(Sj%)=slwEqUKiH~YGZ(+%@{*PU}P^lIN zm8!qCuxQqd4e#7FN2x#w@!7p_#}~efX<7c-3?M8@W_E7*tW1${bQxs&I9Y3bR+9!)IM7 z<$Ej`!2d^rS-(~Ji|e?tPur(V%LmSQW^apgqWb=40c_Zj<~eb}cDWB5%H zW*8B;&QJxT^F6OKl9YPI-Z%Bk z8yHDW$m${Z37{#kq1*T;g9dOu^N#s|Cq6RkRhTI?+pyIt$~y%)iXb<`wg* z`L}t)ylwttJ~Q8%?^GjVoU3}Mj)W11FItC5`ZTH4dN4j%GlM&kwzK(kbTXto*z9iq;Lo}*?$XRC$KB6SV)Z)z3v zR&_gcHK$f<)Qix6sdu68t52Yxs_oF7YBzMR`Vsn*wz0J-FuDQtOzTFPnCPauDKu8M zhPKh2pj~uVXgVp=x`*xo?WIqLo}v3eb2M?&XOdE_hw0(ak$NO_w4Mt+S1*7r)Qh0! z>E+N%^p()7H8IyW>KmcA=v$z-X=1L|=)0hQ*AGG;)(=A;)sI1+&`&_0(nq1k^f6`3 zjfQ8=(IwBTB!Q%QvQ!Q(V3Y^(WckspbtRMTvv%= zKa$!r-1B_5TK-NTjAD7Fh?pd)be>F8sVdXMov!lnC12&K(@*Ko82p(_91BTFvByZg&94-mvbo%yxD|2EC4L2J4%{Nit?u|9%GXYmNn16E9AAuFy;|L({;sOjF&)s8 z^&I_>-lu;t+MI7Lqwc-yi}c0%ru%O7-RJwK?czIDE@eLH*y{HOcp`d{(C@BiGt z#lI)4W!S8+qOiY(y%+YMuuWm*VO3#A17U&2fx&?>folV|2ksA)1$Kppg*OgQ2=5r4 z8D1Rze0W9p!SECH>eY*`H@@ELdJolmrry8neOT{{dRy!5t@l%eiD(otIpW%g*CHE4 z=0%Q@Q()1Hh8_E zzhUEs2@QueytUzd4WDTEa>Mr;1se5kl-FofqbZH%G`g$NBaNPG^hQ+usOC}0QQe|? 
zMGcI~k9s@m)2Oc-k83==@e7TQHwkajw8_;?ZfWxOCOaMuoe_Iu?3&nr#BPn<)4X-_-p%uxk7_=p`P}9YH2)>8Yh16mYvXQWfxeTJ3K2W2+O1;fa$I=Omt&cz@!a#Gev_t?RdL z-g;{5TU!6U_2aEyZ2ea2gRM^_g(o#jDoWbWrcaxT+q|D#FF7Z9X!45WtCJr~eldAV z+o-m=ZEtS-W!tj06>Sf;8`18`cHg$!*Y4-`Y3v~1k(r)d$&F%I~x4kKCQP}uzLB~%bz@rRv>s_WY58fB)8?ctN;^MoMcUWt4b!`&_fDUlep~u| z>6ICsGSV{oW(>}FFypt(jLd$SXJ%fJd2{C5nZIYHWcA28JF6gTVb*O~rCHzh$mnr; zkD?wA_1M_6Sa)7fkA04w zo^txVef@pM^nIvr`5C93G3|`I&iJ8UQopYKmh^kQ-~0VO?e|Up#QvT7XZG*ge{lcX z`rqIGiT?lU|4#o;2ecW`bwJMnIRi!vIBUR+0rLmEF<|Y0jRUr4M`g!n&&n>!{(JU6 zvv*{FpA(+bG$%1ZtPe%Scs{g3vqwXEGXLR!DE~7I?pD}vS=#ir*jh-?3+|f%$ zUo7W6qc@D+J-TZ2FQbEF>WyhLX26(5V;&r{eQcw#XN+Av_Tab%<2sJJb=t;P=+ zKY4t~_?yPB9{=3k~0G@Q_5!rTdWPN z>8nYH&l+{slV@$6+;DRC%^{lDioSk#_^=H3w_J(O@TKcpLroDYm z+Br9z^Yiqn)7Q?(n{oS$pJxu5`R2?cvxd)lY}T{0zMFM)cKYm++5eb*sGx1ZxPsLM zWpi51xoXZEbGFS5pF3ynopV2!yMJD{dDG@SGw;Xw7oU5^x#ypI|GA%@``rSypuvK; z1??8}T5#5aYZu(R;ME1|7i=$#Dr{TWvv5k`tilC_OA0S4yrS@-!Y2w}D15E(y~1^c z8ww9CY`idTVba1r3#Tt!vGA#dA1&Oq$hWA+qUnpSTlC_hvZDG$9g1>`mKQx%^i|Oh ziyJQPvUu9!YZt$;c*}YEyo~ckoOi`}&z<-0;wHr_i$6xnYY?f{(I4^U4jMjAJvy&o zX0dv-a7M`@`e7F<|G?~VNow|pk;9YJ@ciMqNh)*9$Pr13-aGwo+qa+(NpG(nd?fF6 zMi#Z~!%tx!IE8&!EqmL8cKi?d&KO8Ss8=0w=uX2~vlcE^4+~vAdr{FswQ5fBj9KdH z!ufM&sFjRF3e|FUbpL_i=qf|E7P}K|>eX7~?a3Gt6oU_qs3i88$m*xG>KFWlJh8Q> z1NL@=L|>aeicOtTG1zMpWt{xf)HZwHxo5{(+O#ls1VfBXTY17sb%4}zS06VvpF?rf zDiB;RZYNgT8OARS2%)iQVw##}CfdZ9Skv6ZnL%c-#}%m~td=I;B$!qv(X=*6rj1E9 zLyR5I7$+o2Yfb75@inTadR0>B#4HkP@+}ltH=BP=1kHJT*=2fQEdeX3>XF zkP(J?od;3^N@NHdb*h`HQDi{2Ygqah;ve*_fhILEWh4c&3#kNeO}FdroYyE zbY*pX3_6EKj6pY=ugurx8?(ujnayU4*=n}gQ70ptP^yjE#~3gj*jP7)MoYSFs%Csz>ObGD5fUu8%vL z$^rNi&4|IK*ZQa#oo|45?j~bz+mv-_+o5NiFE&pSb9>kvv`<7!=+^Xyui*L(;uTPR z887r!EA$L~slJu=9dCM~3HaD-^Z9-Cd<}d}eI0#SzFz*8{rZjJ^jib?{CL=CD%XGf3CmSzsmoB{|Wyy{+Ipl_&;U*vem!SU&RQf zUD$KP{#|1KMcARlmBju~QnRGEq=cj-iG6R0eM!X+3&ZPL@t zPAXTd&+7U(div3SX7)pzjw-#P1JKtCIXFl#0aD 
zKZMuk5dX#gm6+FB_;2v9Ql~up%l~-Gf3DZgKi#?VPxTk#U$MW$e?AQ&+l*vvJHhPB@F8ml$P)_@b(;NKJBt+U#RtHnZ8n=n>$sh`a$hi-=jI&^-fGt%b*|tYcP+Zyn~_(y z(sQ~E3A~E8&uaQ#cT(dYKnHaJi?F53AGEW60}ARcG~t>ZJaO z=Hy@K3SXoj@eEp`PCopL$1~q25NH_%1T^JvC5$h(2d6 zJ%W$bM{2NIhlXc^8mj(-p65$7R(*{oWQ&@twyCqwolRBWp?TaX@65B+c620r(X8x2 zQ*s#X%8zJU4xm%{3GK>3G%JVDtsGIy)$iz0R%oMEYQMTv`_wMp%QvXeYNML2%F)AA zn!RSHsWji4eP)+=&%BS6T4z2rYt6ez?oZ4I=40Jl$LapMvF?fPvA6D{PuH0`OZU~S zbt~OTr}7SIh~_d%$Dp%ppemucuM(O2rZdY+z-R&p`g z$P&Gj-sT1RLVXqb$ZPdA`fvI=eG^*A8_-6giPXht*^%eZ3x_@6p-xS#!`>4%GMQCc02J(~ESpF4DRBex0Wu(1Y}YdNA6fA^IO^Ngt+f z`-mQ{AJZfB<2qme6K&}edX#<=ed$wrj2^3>(c|>9`e{90Kc^??=k-MWf}W&b)Mx3J z&_n%8Pth;ysrnUtwtiJl)Bo1z=-2dg{W|ZeH|ROPsb}f8(BUpb_P?zQ^gC#--bF9_ zD4No@^n3avy;iT&>-A^)OI@lr(ihpJH|wqXTm7Bhp?A^;-)(y*dcQuPtMoyANFUM1 z%s`W?&R56K68?&|?5Mh!m+NA4o+&hoOp#e=t}s`carEVHG>@4(%-_wu`hER@{!o9c zKhdA+|LD)@*?*AJtzVe_m0e%)jUf{RQ}5S78?B z8I^CIQduX(+uZvPI(=)d$6SQ@S8x{Y-cUXMfHLgMRC|!GyTCu{nD7z0OYi{r-r8e+ z3x3sEiaHoPVs22atFesdEOP5$7}vv91E2Nx{Zrr-;`A5cds_8m_8}kHmCrL-=0Qv^ zE?4c$$#AvGHD{~lW{ye_)AFCFA%7VUsRUoD>QEQNqp4YgUv+VwI?Z@-x=J#C0lsTg zl7%Ty199K&q$t3CsOn<=fob_{`2U6+)m7i8nwWXGuZyLGwKVvU>-8Qots)lV}E`+|xV6W%OTXZ&UK zQ!RX%s&!qMOciVTsg{30Gu6>}aX>Zky#%&WCI#R|)kM!%U339yvU1{D>a~>tt?Qr* zJ^w4Qt0W9hoA(E3i&xWzk8m%9``|6URV}6c*>-N*nKf-2vhu~Yg9WO-h3{hS+4}sj z$}m5xd>`*iW=^ea$Tt_jmtdBw=Ke_4(qA9mwzD-V-Q2CZx$@zJ$b(;0bKg$x|C2Vi z8~^{!^=-5}8}>hWwzV7Aw{U|D^Yy}w4et@I4+9&o>nV>l;7#K4Ja|}*v0+F$ZTwtW zDzeh%!wag%AMzk1Q{41PI(b&of$XqtS=!`y{2NT&d9yBpCrkqMvV=1Av@hQz6=Ryy zzKggQ!s+l9Q&bDmBY>9bVqUv;OR5BBo4;eDJ0 z=JQVLg1H2_+yQeD=3Eu;8-Tz6zr)?d$jsswm8R*(>#OLwSJD5kqW|xj%+&X#Zg z!EVM7A^+#I24g;Xcq8e%k>_So&u20gaqpAg{|1kvNA09P1h0da7&ZN-IuY+wGZ%R` zm-0PZwFABM-x*WwCJt{SI|r#u^BHBEi<}!qKY@8>3+icCeKX^lql`v=SA+Ejsu%0ceasd3eS&&&9d+_m+Tv(6O5T;PlTIt6EpFsl(o1{lV6LIQ<{?j$ zY4^S0+p6CB2yNmBbt|5F6;GYcRK3Zw9NJV4@1uVD9o5S>TV;}_CgvXEd6|UYi9S#a z{fW-Bxq-gMsXXUQ_(g&v5TJ zc~C@sD#ARB`6;HwDwVCfVm=EV25IDF7U8a;PvewH7n}FW5CTV#a(1}dl$A%Um{Ol0 
zmp(>6@H8ue{EQ@@QLA*AYEORt#t3;fbME2Phf2}Hc2osAQrBm!x{jLusU7c8*E*{s zjQSeuCiDlI>SolSXtiF)sQ)nTKBhirG|0#i*}#|($reuyOkl*As9Q6NWxat;(rt9I znxoqi&o6X4#=LV?scx@3s9W@DGLFw+%$}*TbVtUSomCIstQG1wW6EyGl%9+$Q^|u~ zYNJkPoIg)}%{bGJOnc~_lt>@On=)EOC(>8#*L~IN`V3`P%06*ZnMJ1UUpf$`XC1I*$?LFtigx)ltTgBXmCP ze54-53XgA8i5|^3?E10PSleamzJ?K_f>dW-y=q27o zdvc$?f-$uniC@E5+>XSrXB>Vb<8eFYzJ)RPZSENSP8plu&Di^1#@+WZ&VGO~_Ct)X zA7*ULdLc&E|72ACBqQpl8BagUNcwq3(JwNB{uiU?R~R|}n^E)YjF{hKwEQ-sw3nnpD}v$4w=izrCL`zVGIlO!^t?;P z&wCj`?~^g~_l%-{U>yA;Bk7;?Vg0lIg%R%&#=OT)I)8MsnWvM@*PLv2rY-ud_ND`R zu8yXY>1?`~uBMwwG2NM6OEc*v!(^H)(*tdAFVT;kZu*)tn0xDQ2B7`Rv7>yGXJ>-! zJP$g*VWRWPHzU!vjW%P@q>V!(IKfOrk9U@tj9zf6IonJ#=a}hchOzp>*`~nELB}`G z%s0$^p&wXitcCzx0UEAoG;6U)yEyc1EqS9Qm}0ZUl$fRFeDsaW%!TM3FG8Pp30k~M z(c)c(cF$@St}<7n@4FVwq@C}(0WHH#=4SPVI*i8Q7KMHgJ>YF&7Ei{??Oj; zkGU5;#C_&|^o0+ibNq*S*gRq$ML+R4y=xzu!6(gA=4tbcc^1vc^JoF1R4cS?Pbxq9 z$YivV@1v!&Ga9^S?W#a}*)O3}dy%&Us{>ViG+EQoV)dh!<*XEZ4bA0;Xy5*YCi4}v za!;Uhdm9~NB$~RmXd16YZ+Jag!yC~2orCu5a`m2h9WBF^Xg$Nto8~Q68op!RRaaR( zm|D%cwnpd;4>F&YgpRNclJySsrMIE`n}H568=c|3=0h}uckzx`V?IKg_z`dBkI|id zB09A7vTpHn^M(1+Z1}Tz9rUL=O*z`qU1qo0gGR0rZQOperr(JOYXzj)Ttg>woEmPHPnT~sn-)~td>C4t;oGjNSjTr{I3kSBfw z^29PSui9C7p6hF#<7@rFHNJ%}T{J&6Ej>MQaJ6&)keM@z>kp}MW6m5JIMa#WnXXS~ zIz9!?ESX<8yP&@HBsDWTH6t~x{?M8S9nUj*1cpm00>dR{u5FHgI6(&Tom=^yTbWtD zyhU>ZBPGa&Bj+t$GyvFNqIKbOM@K6Hvs& z+4BpE3zp1Z5|}u*c*gk!_0Oug88}O<0+Z#j$jR0D9+=_e@eDVx8EzezQQ!nHtHzJW zSv611miPx)kU<3s6v$O%LG}Ic0@u3&xA5oIc;gm+M&#UT=l*%N@aNUI3CtId&HQu2 z=es`5cME^Mq+dKq&*vf98_dK zn5nMW+jTdnhhyK{b=N!7bssXb+~-{PsUd&e=hAw*?xhK(W~TLa-RHXY+3xi~_d3Tl zL)Rg9d9M8+$4qy_OLxOdcjK2HN{8z{-Hm6u8_#q%KIv}!(%pEahvF6T-wi+AjZbrnbb;kfZlb^T9u!%KC;Ne%fIik}-^svG|vA=CB0r+eMg^}lzhJY4^JyY70s z@y&MgCEI;2+j%Z4ln>e25#wnhGv=4fx-iP+Y?E0~FzZ4mXLH;Ta@-Jd+zT-QF=4Kde^X|C%}uIo>(8^c^T<$13AJlB1m>pstQpXa(8$qdCajKsSeS-4Ju#5Odv_=DPmmy8h(4G0b)S$#dQ3x$g5^_j#`SJlEYI*Z)DT z`$4YzL5{nuG$(#pX^#I{PKz6qlP)=oDgF$~ach2-6JM?!f3uvH$+eTNEGHdYJK<$H 
zEtYGigtMIVaV_x}l;hTrET=_d@05I&oBk}Pg=6owNGCtIcH^HbHHmAdMrGwndboDe z^*(R}eBLj1F9 zQ|nnDg|U8GCercMvJv91!&#rjo%LJXIlc?)_%9A61mPqMyQ)>f5XOdK88-~eI$=1R z4MW`7FvOh`hOkZ;;!wh{oP7hOr#TWDGc$tJh&#TsuzBO~OX||Q_3$rwWm|hQeAnGn&Y&i)Ew78=iQ7u z*T0^we?49Qdb8$#bKB;Y z_qM%`U0hH+zi74{<}s9*onqG^v&uUD;q*2AB-w7AGAiX1nqb6lUCEKE<$mL`#&>U1)=c0#1g z<+*gX`;?lKRbM*Lvx*kZoONMi4__1gEVoCJmE+2T99Isw<&@==Q)-qI8B7TT({1lL zZaa5-JXvlzXSqF}EcgA8mE+1M_x+GH(0MMy?b~If_Kc9exQ%RtbjK|xS)7rYBZHlc z)PX583QJr&rygPFUOVy0NX?Frp@8RUM`C59xlg#g*9^Beo8k6iGu*yPhTA8}$mrF; zj^(ZhR@EfA6VvK9%l7%86mR z6J5!b^qhe$Y<)_}8IwJ5Sl+l4`z7o-Y3W(#q^GAQSb33}lX=pcaTg*iPxh2T{fo{h zpLJi(w+Wf@Z9+Tu+XTPWrn5@Yhhe`!sJUi6e64G{*4D4W78J0$KUZikp{WaJ6faO6 z7cN-10L>B$Z_pe$Uo_a&`u18$9IVfFb#@r0!&@IC?;Ur~OQ$DfUfk|b+jMgRjphVd zeQm#CwQIq(PGa?$R_~!7i$yziJG!Eq(0yIbc`>@_#jK*6jpl3;`sU$iy9Y5N(FZMi z3eRlyje&0QErhQ2T?bv~y9Bz=w-S0nXqnI*LNE0dVZYlq8@j=~2i;`s`sUTlk88Eo z?1JtVdQ9l6LRau5uU6lft)$dh9c{k@wyTWocfj@SY)l(gi?(5&qRj!oD%3XH!Dbg5 zF+T%qfwg}RtO2)z8^G0IC14$_LSIu9tB&xQvQ+53@z5>0wS%>Vw~ zv#kNnn)iK_lV9RyYley5sN!N0RMVK9F)8N#wGCoK9K^$Z4@J;h9BX zp7JG41rq>saY;i#E?~YZsTW8GU0rm*Oad)IG|%!)0ngi-@jMz>TQfdW(dbaWLp$tO zZayR>wn|lj#AVDlKh!EwsaA2^QLUP`YT)4m9^T${-6S0I@P|D-GlA7^_Imi89)7D* z3CteaBjF1VzYd<*ioKVA%VYoQDfkyW_D{h-3LXG=dw6Cq6K+ai^>M=G3DmM`p4wE+ zQ|nI3pX;%o?ct|+_(>j~utG<|80?2TR|$jQ2e^D651;Abt(z3rO-GM?GJHb9wF$A9 zUcPaSeQW$qh?F$?fycimcPMu(kI(h+{c8Axsv7=O{wM6LaTDK*_|TH=5#JSbaQP(o zmJVOjHnFq$8ST1h(f6&A4I|XmuS2*r1ZTYSH>?ZgPU_JQA z!(01zT>IBrzU1V8%V%0X?%^Nu@b`N7)gFG8hriClU+Ljjc=%-=zL@X}-FVJ{pYHOL zJ^VNipYP#^)bR1t=axCnv++sfVN2VNuvW35fkNf=;{1K1+p&H)$7VEr=Hb_R`1j!7aNSt`WtV@>WB)|l!*Tam9C%5;Y(b8k%#x%&#PlU({(@9V?QBoRNPRmbL0B4r$P;%ehR*;$G(Gy zPxA0BJ^ZQskM_7}MELdGxEb=FdTx)nQ}E4CaF0B+M+@qCh;ROjhqrFXLwhto2==*r zg@^z46ufo6$#L(EL-P&TuXpTQuWSBM^LMy@O{qWdFFE$jpK1Ph!cEqF^M{(>>)}_Q zf?wsazs|#7>ETy+_+=ixxQ1_moM~R@#IyMv_~|Y`*~5?X@cABoh=K&Z@bIY~zB7C~kO<;Dd{YnKpoXt`FSlx%u#SGw2X6cWj%g{wuINm{P;@RxHPD));$_UC)}+iUuhwbH)Wg?R~Zd?yKM3 zM?yUB?(H@H#2kq^6mtMycE@b@J{vco#!W2w7CK^2l|P=e*GiWs&bHrT58H=fKcedQ 
zdo4db?!9Flv$>|+TYP~Zu3WHj3zcaLPaTfg=!9iy%x5*_7_-)6|6UExPDwTIbWhr2 z-l%aC^RkD3&ci?9;UBIkms;;c>TvVvj=PxqkSD`q*2LUO-2MuG1MaSlSsAk&)617| zy(ng0%uGx#Z{19dnGiF|bsyq~lJ6z>lZ(BV@5gnonDm&gm>ob;Ov{*POs{=Qt{cVF zi=qC;c=+fOeDwcI%*yD4nER}3gs-T>f6MhI>^ESp2OmYh6a5L4$D<#r&D(Yz zeQ)&Y=vCyUhmXDv`zxbYL@&ei^2OARcKBb2J^38pA$ofB7CcC($$wl@2keDLtizG$|t*$2(uYW6Dk|1UiG(CjI~eH7EnKfv|f zPF+=+mE=}mSI!zsyX&CUORlgsdXZuG5WaQAhTS79PdUK7EBqCfs%>(&r`X&tPo;?u zw>Z4Gxtdjvnw9F-=YZG`VKpjyCkh`c&-PWnV$(|asp4{~@b-HRt@~J)>Q2k^O|<2& z685fyn0_;`h}-lu=z#!@5H`~xVLi&?DwecPBXpa?uV@A z*1Ci6sp2w6c)P2IR?iA=#9s1BU1n`qjc#K*Nceuj_p|PGKXKnn+-C{@rufrJY`ztn zC&cDS%Lk8&{cB=>wb)-{dA{bh{`VF8C&f)Cu^%CBZV{X2a`zW&qZ4kIu5QJWyMLQ>{yNLfmJG z4_U%5v6QdUY#E&;J~*}Xn9QL3N8%PPelGFZU1J`wym~<5`I~j4>xn;ug+_|YdUAKC zwNX38&kUhua<@$QjuOL;61P#}@}T(8QR3E7__>yv7+bse!o=p1?XhXrZZgDuhD|&BcG)<5AT}S$-FV6Qc&TIY z5{Ghe6EC613qM)t3BHa&!ppPq@@#iIFR6>9UG(N#8?AfG-KJtQ+&3GW;lke_beP3&fuSmuD7LKO+1K%(`m6 zV6g4FKxiAO3EkyxcZqFx38lN-%@jUM+~kOx%Y_aU`y6pIMDF$#`-_E6kh{YjDsF}f zKUDZ{g#SkPo5g(_OZi5`wuLD+4t%d+V>m<7I8pqcAub;jS|C2m5I1)Se~0j=S(oYy zag!`*$&$NQn+LI9C2o2-u@!!pxV&B{{S-nRX6=!d&K$IzkDg-hVq@o;SKC~LIyQE0 z`fK4gSy{@?E!KyH;@d(AeW7C`Va=3Uah8NWQ{2q7v)g=SWuKZUZn_HJRrsDV8~&TP z>?zOolrrt<+Sq58S)Q*rZRpX?Q{n^rJ#qJJaXDRLGF|LN+Utqp+eEp0r_hOVccOjD zB#O;8vDqg6Op#KUA~kJ_)U+gVGezPf`J<;uTJEv-e4R;r`1aPOYYINtz2EF!*NSxp zjWO+B*HO%E$HLjYuA4DSoS{zR8`S>HFWbGYdn>!wb$`B-naPejcCYKRl-=uk7CY{& zVjY3q>-tLado^};uj_01>gHX(im`iLKQDV-zbboOe38{L+#?%MN}pX_x_kB+^r`SOsxuKDhey{>hA**hl6?scu3*uAcKayVym zyVo`Q(~MzXnHF}hYrYC(uWNRjVXtfaSiyOQwqLpRldszb$k%N-eARX%eJb|7W``K| zy~bC&+sshDW_wWfhIx$LVA$~*f7$U`k42K*$X*Sp`_$BWJ-`LMQB8a2>qf`EBhMs*1QkY~)=$3q zbtLdplD~r^;1DHB#qPAA#D_Jz+BYJ$IEZx6c%zfA=rN~m3lqva0!x8u^V_k`V*+^!wi z&KGZvD;twVvX&;DuMr>Te1LNe=PEg`b@`RlmSvpwjx|?_+f~Bbd-m>P&Utz|r+s#f z*gE%CvQy9`y-eLpsNsCk_#RRFh%fCm-!E=a_4!tD4{@y22UQZ^BOc=`>J4UtO5=;^ zb-|s%o%&OuUkcqIbYrk8Sf#%b`gL#{`<#3ubd%gI6S_ILk9|(I2;Cao&z=k0gjUE? 
zyM*ozRt76|rSSWN?icq5g2#i$_4h)nE-{Y;8ENrcMX*9l(jeyE;9hdFo-*OT z^KLSMt}2Y*G4u^ALp|Mua{nJ4K^r^I5wxM#If6%nM?+S@->7jM!C%-l)IEa7g2zI< zHG?)q?qQz|J@2&--M6MU4#EF$dY`ws=DG>K9(-MRaw@o$lM)Vo#z~F_e+vF&?|F}) z&A~cHt*}CAITW-KLk?T!A>NwaxYV+@rDyNE`4F_F;yFTj5_H{pZdE^H?Sk8b+iTq; zy}`qrZrnomY~2d+UbAj|z4yF)U3bCXIYa5kw0T*}y^Y5SPH#C|ceXa3sn#d@!z%@n;&((3gv6?$Ka3R2ld$29pS$YejP05B=3WpIc@!@6TiCt z$#b51TsQtwCZ2W>dcJPCgzCEZ_g&DAVXF^GOP%&+Z-&xR?a;&2@{ckT_pa%2rfeGJ zUZ`IF5d5Lm{T;y_a(`2BlbCM1^12D})p=Sc-$P{|+!EX(ez*_Xs zinb|k>Xe6-Kf#ZKAIrUOg5LzGBQ~zV4V*R}b@EHxdE}V5amy~0m)4E<2(@LKZ}x3e z?ar_^@FPtRh&S&pc8Fs?0QRCepns4z?@{(K8EW#`-Dskj$bL3c*byn+eoN1OE`_F; zot;Wd3H!P5T|9d>8lGWS0^aECOMqYOOhAu}y$R&KzJa~PN;U5@cJup+o*w%Xs77{& z0^G1i0dIYFDWC`Tt=^8Cow^*#J_WekLvKJ!%zVV`d)3hHRe;Nb^ba)sgJ0p&>`ky3-p05I`wM}Uv&tN z3y%zs4E(}5CeV+w2WJYWwQ0%8b=dGQ<8SOsWbfDNlOERX>}9}7?ruh>0*9|1=$(aj znfP3Gmmn9a_m;5xN`&^3*e|;<*6bZ|%w*e?lEU$_6NAn74E^fTKB(Vq+bQtoaLS}J!p2EPh^Mc>cTudU>y?`P>I zaZ@IAvu#cE{Vd&T-{AE9Ed5q|_)h3{p*tj$ox+z3tq`Ag3EeGE?GgJ*vDqhdzl3tY z%3OMhmR5<&AA}wh_lJc4N$6prKMVau=&wSLh|8lwkI7rWr=32j(+_p}l5ymq&5@6V zuC>ya9JF-3m9pfTrKLhQ2Iup@r)*2gbP*9O-G*Ws6? 
z>+Ktu{IaxE=te7#$uCPcTiHu~S^BNq{Z8n1p*w|^3#}0MyM*o$n@ZvL2|Xw_M}!`= z)MVQ_LQ0+vK21t29d2{Rj1ZbHHsclFO_+)HJw!U*3ch8g(910~XA7Msv`}0w6uLzG zTxzMZDYhl?vCy?r?>`Z`-o9H%v8AO#Hwyhq=w`|1EkeH)8+jM$?ZWRAS}wFg=q{mq zgjNdOC-k7uBSMc_N{VUuqp}1Tq zbcyZ5vD>BX$?+buCMmXbt$m|Y#!y10h7$6(!EXuK(tM$zT0_5sT4U`OTB?$%pY&^y zLzaFibc4{1QWw7xx=C!xgl@L|4eE)dTWvoY>0)VxxZEXlx7bt)zfb6XdA3UIe-L^| z=ubiq3;kK>FG7E{l=^_&B42DvI9up6p+&ZDZq5_B#I|tq#rDR0q~E$7&%4c#tBj7M zT{vUXNw&U;8(Sa8qb2a+#`Z?yk<~U0A6XyCRZG{3_oJhc?7$6d%k*`^DK0( zo7gLSGdrZsL+84eouYq6Ls!C1VT%34!gLt>g0WkhTB+NjZN1#?siv+#PnyO&Rff(| zSEFa`#g1ZU=rh!H^4GxY(V;F;H(DJkd&8mMQn#Q(y-MB6u3$H)JJ=U&m0FGV^fq;u z)t0HdMVEDtoo`2XVSo?#K^O>ta8M6KfJjh3__AsM8j=T%knK^RF=zssf@UC^dodsu zu$w9R>eEyU&=SOh1keg3g4Q4jv;j6>+wy!n&>nOEr-6>xcLJS37tj@S11X?8NLB68 z$DO9qK?WgYVrCIq59)tU&texN@X0J1?27zlDf9vB1$gCWG>OfVD- z1H-`xkPk+JQD8I}1IB`JJTo3lfS-st37iEcgDH%Ar-HM=G;j`>4lWC>Q9oe_b+z$dKp>v3i_m1!N0*9 z!OiM@@B#P`(7#diZq!=v30Ma{#jo|?Kj1U)Irsv62{wRIun~L(z6Rg$Od0W~kEFJM ztzaAYHu#154r~WIz)nyOcJtQV1NH*6DQX{D&i&v3_#RY&AHYHIBRB+p0*ArRjQM{7 zzk(y+C^!a=gA?e#egnUQU~s!eBc?UGl^fs#eh^0gJ^;c&JrDtsa7SI%)M4gnx9gsm z^Xc!M3l@O$g6nlLSOU)H`U0>FTnLtfi-Pa#i!m<&D{ylu=1OoG{1w3+`buyu*M9@o zf$PBy;6`vW&#uCxPUtn{nbp#MDD*|4&k0>G^c$h}KQ|lbIg1`xs2#y>pnh1&oHEqW z?*1bDQ$n{2eO0KP1w*GSlrb~>TA}-d?iK3jlfMw2e^KG)5VKooiWB{}EOu z_I&%MYH?lj0kl#(ulg4bTKGwiQ`w)?@hL35+G8(&3g>@f?dYbx4(K0x)*J@k46?%R z&kx?}wlDFQk5lEqsq9bgv8z@8&CzCBH?B=BEndyD)_0`ZDGo;$ZPV&aNln^4`h4r= zsG~cl*XQc_Y2USukbN!7KU{=T?8Gg2pW}f3i~>JJZ9TQFkSA7sX^W?rHC zmM}FsdfXhLcUY|xX2e#fyu5Df_)<&DUHjhu^r|K`wkKFO4vuf3JaMncU&g3UyH`i( z`#xN^l>QitP|p1Ej_Z$`ddcGh!6%&3l6-&I<`U(4iq>nNr?@1HpHKFQH{%9un=KhO3HkdX)JlRa=+QMb2I zH(fTawk_H=K|hPy#k=91;474!Ew@dI)|Q#EyscoT*_q@EmdYxCi1iCX5|4N+$7fhvQUk-(&>9- zqSG$y+MdVcO^xpJ;_(Sj=_wFIyY46E%Kij(L z=Ep8{iquSMzq}nBw_I0C!jR3WOmB)$>E>8Etc*|LdTQhjKvr@4Q`|vbJv_ zr{<&2hWE%E`Ww8Huya?;*xB$4=?_Byy7l%dhPJ@^4{WjE!)51rnvZO`ue`>5oM(=)K|`^r#{ zN8ajRJ3TwS&EfKsfYWTWRq~?z0Y<&LA_qz9LKh3kQ^sQ^}TuUlF 
zQcJF-joG<2PFh6G;k?0XuJQfV(3Ks1)IFrPEV2|$lO3hhXMSvRs8(Y0gYBvO_rqK7 zYOmY*@6W2vHE%1a)u+odSwbV7H>b=^_Cya;-`bL9G2v=jV?DX^E4p8W-rJ8(x4K=} zJAZc6t=A`WOAB&KfnKehVczdZ|Bx-ASy|WwQ+iR=euR;q_6~<#Wg6n)>WsZypN#AF zYsWk}4M`<@IYs#}JLfrgC%SQPEYZ0Ag~MC7YRc#@Ja`HxBHxLJl^cw~+&Iwp{goce zPWzC^0f{NS$vsXQys^NA$i-6xV14+n^552U@8PLK)z;3Kjxs+UN_X&WdsD{xZi}l~ zr-?ly3ArzAnUO>2x=#PYbw$~bYG$jQwgUf#JZ3I1nCO&K|(u|K-4(+l{nc0D=OQ?>o^rm=3{uC~jQT%Bge zH@t$5C7oh4@Z0t zVZVfEY3J|w=hFGi-LG;?Os<|yVph5?sb*5d#A~o!;Z3o?=)F? z*iqK;btb3n_gl6{U~5vC)FJ!-^5&Fx95pLUYEm<)Lw@`JE@o&?R|6txA(xWvEk6P?MgdCTZ%?8`SeR`J+%nnQ!k-Ykdd)JWUfAqZ=eFKvk2GWth%hH?V5`S z>RCgP0iC3tB~#B@Q9tY-)w)pILiMZ-^{gHCwuae1aCe38rn~WfwiKNL-(7cyPt~dL zcK*L5Ut8H)XjcKWlvMz2JoT>)@}wVkktqC^(AL5Z)WT(|p8cyB|7N?0T3AnNVS8%f zrSL1Mi>Xoz`yg|!g1<&zgZ;I9E7hC-iv11#I(;4d_4;~vyP6=;evt)l*AyhmngXYG zwns+Y#hO>vTk>~PyZ$vrYGsh&C%gd$rD#!jz%aXIzRsBU2)~m2?}^_2I5?U5<2_)@69-m%ALX_MIQ@ zyuzON_(G?LEtMyn=Y)1T*K?+Zc!|+~l%bs>J63dj3wmY8p+L^l9_WzMp<{=RZC7;Y zSRE7M)OIT9V{O{3YBMKvw$5!G8#)sYCvE_TNkb$t0g3B5-}0PK!9T?LR^sh0mSZlL z)82b4(b9RG`EvG3v}XrShij17tJR@aTR9KGedgjN%$Mw$*nqQFt7)xrTlH(D5_Ts% zp0FZec7ln|j8BedSIm~b#3#308@D0uso3SQ1Dd_p>~VWWUML zPA}jOb(irU#9qkY2;LlV?>>1pT!LRs}`yhKNw9APuAwLI(0Z6F!T4?2Xw6oDTYe zGeAGk9}EE5AO{QtxgZY=0)xRgFdj?U=FgT>L40{?^#pzy_yMT(TAUnSpd#s{CWXchW_J1upC?jE(Vt{ zj#~jP1uMb-CbR>D_7kBUAhZMjqtMzSec8(zIQysD9nbybv7a}+ADQeYPyD=D{p5v@ zx4DnBeL-m75!x4o_64DRtBM(qF99WBDL5ZofKSVqg}D$c2N!{h!Oz6hu4X)e=JGf2 zI|#Ch+-frEQ7|T*1!jW+a2~h--&j$DPn+>+Gd^v`r_JQtF>>x0Id_blJ4RUgK-=GpIfcB8Cv`k67#aYjxW+ry;dJlx*FdYI$b+h@L^@2I(NfDiaV7zluHP!B`^ zt7ECp_^$zINI$9(T9_!%7&HM*K{F7|y%-P+nu9pd0<;A2AOW-jiJ&z|0&PGtXv_2M zKzq;uoCZ2#-wAXET|igR4WxkXtY=Kcmo)fvkU_|qj1saidvo0foDTYeGeAGk9}EE5 zAO{QtxgZY=0)xR2!aWlV1;fB_FaqR*kzf=U4aR`6U>whk2NU2YVom~Qfyv-htL5e} zvYiX&f%)KEumBW-g89rJ2mzWMv$v9?*wbGy9?Y6?g96Lzk~b0{on!cAb5zn$lo*Rnfh2w*AiI~i>&B~ ztcXHZ#8UsdGTQO6imr-PbXBaPt6~*h6|3l~SVdRGD!M9G(N(dEu8LK3+gL@nja78p zSVgyuRdm}}MYoMrblX@(w~bYF+gL@nja78pSVgyuRdiMKYJIGv`<9h-Rjj0|VkKP_ 
zE9t6ONms#2x(Zg(Rj`t-f|Ya?tfaG}Cm*Zn_OY67AFJv1v6^lltLb*Knr2|W3 zZYQhhcCwmoC#&gpvYKuutLb*Knr2}gm83EIVeg#(haugf`$B}d=@SkrqX}uM+ z-U?c81*_=}vYPH7tLYB1n(iR0=_*-GSIKI+N>=53Jg&V%1(1tM<0BYOjh_dsVF3+s3NB6Rg@h!K%F~ zR_#@>YOjh_dsVF3t76q&6|44sVAWm~tM;}r((|)&uZmHg|9>Q54k2LIMkTCY2u2D#sC}cwv5+(`> z6NQ9{Lc;7p!t6o9>_NipLBi~z9!DWzqL46ANSG+whk2NU2YVom~QfyrPBEnzA+8%zV|fa&0WCSfX&FcnCc3M5Pg z5~czPQ-Or3K*CfYVJeU?6-by0BuoVorVI&FhJ-0Y!jvIl%8)Q+NSMt?n5{^d%}AKd zNSHDtOc@fU3<*<)gegP9lp$ftkT7LPm@*_x84_kM62^~&@t;D%bVI`Ukua4=m`Wr} zB@(6*2~&xLsYJq5B4H|#FqKG{G9*kH5~d6ZQ-*{oL&B6HVakv&Wk{GZBup6+rVI&F zhJ>j^!uXLen~^Y;NSI0_OeGSg5(%>d39|zUvjYjU0|~PO39|zU<43|&B4H|#FpR9h zJ|x6`Z~%M{s=yE6Aovj+0zZMn;Ag@*xrF)g6cXk~B+QRUm>-caKO$lFB4PF-VfG?n z_99{S{x2lVuO11r$0K3({E3A5)s--N>PVPMBupg|rVaiNSI0_ zOeGSg5()Do5~dOfQ;CErL&8)dVJeX@WgZDriG-;{!c-z*Dv>aiNSI0_%#TQzN+e7f z5+)1@Q;CEL(=F)39H0+#fIiFt`Y;FR!yKRwbAUd~0s1fp=))YK55t@g{g?*8>BE#G zVbHjNCZH*32L5k-m~tdcITEHE2~&=QDgQ5hm<~voWKSQ)N|+cVOx*vm4^xhWDM!MT zBVo#sFy%;?awJSS5{Cc4gVA6N7z=9pFy$xd!|X@G>_@`vN5bq!!t6)F?EfE1n5{^d ztw@-yNSLiin5{^dtw@-yNSLiin5{^dtw@-yNSLiim{KH6DH5g>2~&!MDMiARB4NHl z!fZmqe1(Mh3JFt+gegVBlp2~&!MDMiBUs+KTMG6sE?cSX%t z0ik|ONXnEWWy+B<P6e&}Rlqp5Z zlpx;Wy+B<aOmpyShaeyXY1dL_tAOAb=o&5D---curm9w zGFw7cW}jFYHM)9_anwhl(bY*bd>Czh7WWIhmF{;Eb+;R+k}+Eq7y`cP&K%j1GoSA* z01Lq)a4EP9EM|sq30Ml2f#vY!3UDR33S14Y0oQ^R;5u+UxB;wW{%;lS?nZDESPgCl zx3X68Hn0ZV4(f%ayxCgownRgTW+Ttmp6Ya5y_Si&wY@$6j(H@&$RKJ4~B9Oq)AQn>$RK zJ4~B9Oq)AQn>!36-l5HHqs?uj&26L2ZKKU?qrGiq+_05#!&b%(TNyWOrG4$8eeI!r z?V)|`p?&S4jaWNwWVg*qcHyi7H-fvsT7ZX!4=&tn(tgNp@Sm`v#iR$xQ~Snb&zS7F zlKoS%H|kk-_WhA-e~PPOwAPE!T5>2VVC@Zhn0|8u{pJSx%?h3`m`$_6g@bV6AY3>I7Y@ROgGg`?E*yjl2jRj& zxNs0I9E1x8;X)%^XoL%maG?<{G{S{OxNs1ucfo~&aN!_aXoL%maG?<{G{S{OxX=g} z8Yx*LTxf&~jd72m^WT8S!4u%O;7Q^(Pk}!Y!+M_h&0l!^8}XbMz>9!=fOwLv*!Wg# zd@DA-6&v4*jc>)qw_@X4vGKyGMmW_7ryAi@Bb;i4Q;l${5l%J2sYW=}2&Wq1R3kRN z6&r8Bt%KP3R&0DLHog@b-wM~7;aW3XYldshaIG1xHDluq_|*o#+Td3k{Az<=ZSboN zezn1`Hu%*BzuMqe8}@V$_HhrkuoYX_iY;t~Cynr=5uP++3tO>;t=Pg=Y+)<5uoYXl 
z7hBkhEo_BHjo89gY+);WYGf?eiVbYV2DV}YTd{$y*uYk7;9hKCD?Dq&2Kw=|TCsr= z@%$!i{vG%`cp1C`UInj#*TEa$PV8X=zEuOhRRg|N1HM%QzEuMwum=322K=K2{G$f^ zqXu?4p28lU!XBQ&9yZ{&G~l;1;I}m3w>03lG~l;1;I}m3w>03lG~l;1;I}j|B5J@M zp28lU!XBQ&9-hJ;p28lU!XB>29&8SLRR*u!VwbTgc8hSSY( zx*1M4!|7%?eGE0?I z4F8(pUo-q`hJVfQuNnR|!@p+u*9`xf;a@ZSYle5nuw6T_T|2N{JFs0l;MP&NbrfzL zg9zMl{!m<{HslBbsYObNkWUel)iq&Fx2X`_bGHGz|Aa@?b{%B}e2R6|(Z~!N80XOgf0|e0Fc67KM9d1X5+j*wM zz)z#Y)9COtIy{XIPm|AP^4Uy2o5^Q0`D|7av~%S&S59-~G*?b@<+OTk*Sgtrj^~~| zk06olcJ=gQJpBkyKf=?G@bn`*{Rm?zjq5tF0XuL2CvX8b@BjlWMwTUDDOd(pqQ_fc zVRu?{>eJ4ApwHq=jk#xD)tB&o*_j>s74%>>uAJb?A+BuX_eH0vGa&Yc?)ij!s9Ekg z!aaw%=RaK8%C|q}%1(9VFjpEwSGI8FBd&ZJCnqxO=H4Tud_l@Fo;{0`Am;MTC7jE_ ztO2PSdz&Tz93~ADLW&%?cpg~xblzCl~1|Spi1#2SB`V#7$;pU0n2E! z@{EI_{0?yE=iG_Tcw#G`IGt0MQ%BtBZWk^1G%cfpZ+yl*hq>n%_Z-#l#`o%a$Bwh^ z@bZi!%F}*SojS6=J5B0qknnEm=MGIPLT}?esY9aXBCtiM6|bn3UMoni6{Ob+(rX3jwSx3oL3*tqy;hK3D@d;uq}K|BMfXBfT;J4tbkEmourduzU8IpD)3+);fn8-pq+LyD7HtCZ#x*U}NDDR6LXEUgBQ2~Ao6Oi|GGm*`tPrc^7q_Ry zJcnA>9jgwkE0+^kSV;w@=G4KUA z4o-lR%nh|rXJ3L-;529jZOpT`GXm}aL2xE6S>xw#G)^hkq=f%apn;#-C%f|zM9vYdB!c^?zolu zTIx{waG~Dx1T6m&mVXJ$za(P2ma*A7a4)zI+|Nku0mcwt#ez>fj*b{0oj`|vY#`g1=20$=Kvy#5M2MqT{|JPw`!zXiW%9VO># zz{%cl@>V!`4V-)sPW}c?R>H}f;A9b;yb?~9z{M0emjdTf;9LrvOM!DKa4rSTrNFrq z);HSY{sT4wJW+ln!XCGox_poP-sk-muoZBYBx@k;a4rR|rNFfmxHc57rNA8<+_AwO z8{DzM9UI&+;f@XN*x-&0?%3dt4SsmwhX;Om;D;T4$oiaL)7Nlr6@GsZzdwlIAH?qu z;@up@?>~;;e;mL6IDY?e{C?JXzQJ?1^| zIZt5)ewW}bh9=eME7 zv+jy>(JC}rMI5an?p$|u@>qANyTYa1PbphNr94V0n z)OIXtJL`EJSWO33(}C4=U^N|BO$Qd!!0P$2aBeJB4=hz*{bD@!DXd(b#`{@uHkXDR&&{FRoHN55&( zZ<_R*CjBP0g56$)t+)}~1XhEa!7bR3TWPbmfi>WEa0mD%Px>7QKlSgte;K?2UInj# z*TEaKLxa`8UdA?4;=GiGQAk`GrAehUsgx#_(xg)wFL!sAGnrqfS&U|ufTds=Sc$Bw zkm*Kn6Icyy2IrR3jhyZnIo-(VMou?!x{=e3oNnY4?YoiFkDMMfZ$nluviguUfUE&z z4Ipc8WaW&#F0#6mtcl2)h^&donux3^$QqBV@yHsFtntVikF2uhJQba$qSI7#nu<3hiReCK(i#?F6 z2a@$bvK~m5)U6h8MZ33wHQ;t|2ar7#BGvCH^B=&|;29uN{*jUhr=A0{`tZ--FW|56 z`aIGjb+SGeCM=2Tg|xlUY%eq`(gvuD7#47s!CfBi@^F`jyPR>|u{XKf^4x;9yJxS( 
zird8gtBR2ueXV4y@R=G#i+wv9vTsea zQn9k{&`M?8Fa1Ijt#lVpUP;U^>g`HoptH9t?6--fj{7}h%V*+pkaHk14ulDIWxYAI z5^Z@qX-h2bzp(L#k)|zValb&KBS_R3ie-pITgfjHZ%}sArJg$tTQp16f`jKes0BN< z;Dy`WSw%T-<^RW`^+3YY^nAzZgHO>1pP~;wMIU^MKKK-UaFDS`P_qF$@G>5mh~=9E z>Xij-r{9&*v{0Hx^xLF*F;?c$@1CYqSXr1XbI3kw+J^>xs+Or?eFo8$waiVSXZA?n zg3UZhzs$bSP%qpR>V@S*)(=D4l9-V6Yz-Imt<7hBOKRsoD z%fS#|Bu=FwP2~3pE%U+`6-QF_D&^Zq?WKnDJVAcP$WO}G9P%=@@U2*WTIKt1 z-;%z!^S7Mv^(6UxDa_NTN1e69`dy5O(itf~#@OuF-0=kU5l4NvRm-#1G25Acd5x&T zQ*rwkHGf9iIEmD4Xa1$N5wmVbOC50d%$fB%XSV3tnb&k1HE+k~7=;YeV8Cr~dJWOC zJI{PVj-M#o@vAtKIo|+?2MHh%B!L`|3-UldC;+h$+wpv!049P-U^2KE)PpHtDwqbQ zgBf5pIQI(D4q{6k#FjdUEp-rE>L9k%L2Rjm*ir|vr4C|C9mJM87}0*!8QcB%I{Wc; z_T%g9$Jg19ud|=g?B{6ub4IkEGot+*-)BF*&whNL{rEon@qPB=`|QW}*^lqDAKzy` zzR!OA=R=HW_cEg0%ZPR_Big-;X!ojC^at#~)8HBKEche+%AfEgp5wjD-u)T;1)Kgi z+Vl(HMeq*K{14a&Hi37+X7C<(A8Y|z!3W?&@DZ}gIU%y+b2n%Jd%#{GC(`Wa`T@WR zwD|R(f5jQ!4;TaNWDKyAF~Cm706Q52?8NqL!S-yy_H4oSY{B+y(Hxw*?F2611|Gov zVVKwq3s1wmHdxor$Wfh3&B$?!ejQ`H8^9`638z1RQQ*Or>EbbYKH^-~dkG0&d^|2Jo_z*#}H?69Dlb0VIMXkOOi- z9>@m;pb{^!3Jd|``8)wk1e3sIa51O{Q@~U(4NM0!z-%y|xrhZ|Ay@=11($)f#M0J* zd%=C+e((VJ8u&VR5PSnX1Re(8Vg&SU@E!17@ICMd_&()&lzp>5;Pr>#N2L82`~>_I z{2%xk_&N9m_$Bxi_%-WMAH!<=20RX)0KWxK(i=Vneh(l2fMt1_SIehKg2O4)NLt); zq&<(7{WGs>7Mf_$3*betp6|ZI_ud2>z+2#L@DIQl^vpu14@Tz?0yzXR9bf$Q(U z^>^U7lsJ%e;7aoC;2VRsUT-ANpFCvn)F#9?<5huujWb|-Pzoy1{xVp)&S4w`8P z&9sAN+Cekzpcy8zlbGjsfFL--$|6o@pda2xKfF(~0XuN87s3f#zzsa0jXtIQ%n7XY z7qEF7R{9u>{s=35l(upLW=kCLHTv$?bWVJi^WN#hUsG*p&Y2fzM;{S^`G|HTXOh20 z|NR>6Xt&OsA8lzjXL_H&!tbMXd`7!EM7ugfRAepZC9DJYg8RVz-~sS8@OAJY_y%|g zJPf{xr|~W3m%k0Z1HKEs2Oa_6r}aO2=2=*`1=ekWb?af>dRVs})~$zi>tWq`ShpV5 zt%r5%VcmLIw+q(20_$FZb+5p>S76;M%)B3mc^xqCIIKGf<77>K4;Uw2bO5&bVVbP+ zIK>S8aaeX7mTh9D{x~!B$C;@=&P@GrX6lbKQ-2)xZGwH9VBaR#w+Z%bf_tWw|*tZ_`t%rTdjTPPu`!>VA&9HAX?Ar|cHp9Nnux~T$+YI|Q z!@kY1Z!_%M3j4OgzOArtE9~0}``&?l@4~)!Vc)y3?_JpUE^ONc+jhaWU9fEzY}*Cf zPQ$hqnD#GNwi$MP4yz8rDs`4SYItrX!PS#oJ)!*BT6%^<^bADFfF00N(K8&PXCUed 
z=&9%#4l&EIg;|a*%yMjDmSYP~m09$Y%vzmf*6Jj)RwtRYI*B~*F!QmAnU77(d~9Oo zV-qtUo0$1H%*@AOW0BAL6~O-tWJOW#G$*-Vt|2)$=Bk+LI1%Dx~{_5~$o zj};}}N{P2pV&aFu0i3`E+`t12;KhUVfl63i1%`l?NWKc6>PB!ASPgClx8T#<%C+0T z8gM(f1F-U@(~hmRXlNa{7u*N#2M>U+fvABIP`7s9q?W7J@5$lKF@uW zn*RZ>KLkG_?Z@CJ;HTjKz|X+X!7sos!LPt$$od=bICuj57RZR^9j^TcYy_LYyI?bT z54;byfUV#I@FDn!=wgsoa|Rx1MEzt|ZQ_}isiBvtp_i$he^NWAX*nlpIVWj3C#k9M znCdV!^)fZ}GA&5rmmO-3?hso13{JG`I!~NM%%{tYGj|?mP4!|f=>()~YPd$umzeZg~u5{g(FDO6&hSW%+`fWfZW3@*JQq`8T7w z1+eXRNb(BuAA@NpVA>b3>j{|k1k8E?X1x&ZLy+)AMn)?cn{>|Uo8<2lOIM)MC*|MAg$+Jp4N`shmrdfGRxfBY2^4E+weJYj$6>`qttF2k!5S1{xp)G z#vVM%mA?}CnHyKcct448c_QO-lhJERr>hxU4M$jdaMjuAY9Uwi_--!W&Ecx2(^YwL zAy*5yny;>w)9${Y-4SyKjI?NXU(oKppxrTt1L*f@cV8&U*COLOa4)zI+z%cAUjttU z4}x!ihrq*NC)fpcg9fk%>;;X0-v^;5>7Xa+peN~|C+VOk>7Xa+peN~|C+VOk>43k- z;qP(ydmR2AguCkZDBx~etOnukakzW@tn!+aSK@q@CM6;xJ2?-8Eb|ebaRB~=W71y| zmD5-er2`wV0|#&d7jOd)Fo2g81wLRBfe3(jkN^@v6379$AP?k&0#HfaSAij5Jf9~3 zW*G3w+wjWU@XFip%G>bD+wjWU@XFip%G>bD+wjWU@XFip%G>bD+wjWU@XFip%G)Sa z8>I^3D;=PQ4p2h}sG$SY&;e@b05x=g8ahA?9iWB|P(uf(p#xa57A#o{maGL!)`BH# z!IHIL$y#8ftd(oQlC@yTTCij-Sh5x@Sqqk|1xwa~C2PTwwP49wuw*UxNhh#oXU~;B ziF{81v1)%{H2E}m20RP?h>!Fq@Eq@-htGfJ^)Gz>D-8G>HTnX05v=F?FY)|0!3OXa zcpLl!{1f~O{2TBKTUgB&tY!;VvjwZ!g4JxnYPMiCTdk#Iv2*L=7$Izy|EV0i3`E+`t12;KkiQidQOSsxdP3@##`UkzmhLC;#fIjIk z_FZOaSP8{gXx5nxo#3}({H7mQ^GF=dyqk*WnVH5lW5u7jKf;! 
z@Bhs>>>x6o(%(N*z)arz^sMrivupTEw1K}btG}nz-!}D^ednYfL_U5gL0d&{rmI=y zI4zqQUpGAn|L}$7@6S2S`j_%9{0|R4j`i&^|In#A5iXhpm)MmnpZWf~oS(^O^@-Ej zY8l$UIV{~ZuMipuC z`VaIU>OVqvaxQ@b?tKRL4&ZAZC*?iioVteRz_ndm+sU;r;AJb<|HXA|0ap%krI9ON zk}AK`rDFjhE}osnxCM^G-IahftN3rHRmxsc_L8!TlwB(2G%2Se_c;0f`+R>VBe9dA zvEON0oqXFGn>Eo6kC1XC&Iq-fy~uHr@9jZC@sxMba+;9xguWO%xs(xttfG_=o2=nv zry}1wi2U-sFZtfi(Dy#(dtwFTd#!x$p!!}b-;>psZR&gbLmD~3_atKS1^I0awbWyw z@8KD8*KVG^3+)`^uG8q|9i=xjB=Z|wY2_I$Ty5nU;uY`bIbU+u4lGXr+It<%ypCpG zMOVy({x>_JYT%@*8Hv10%^c&YcshJj?BOfqcapqLk-CSxBqI6&zd9CF+LJ4%vAb{6 z%e+nR@-}{XD?QA=kmh)oF-FuTUdw$ez@wDphtd~xvD{zsZCTMO+V5n!;}Vf(7kTdG z8*h^5o8c{b{^RlYBiX9GQ313g;<_Wn)sc~k!y*S}9piIWz{Z=e1n^7K%)gOq7E z(#o$7ZX>@>$Qg8_#E@F5f=Nol@d9=6@yPJcE0r=VF$Dk~CA2W}a~p*-r6{FDc1M zN^*#j;FWjDQRb8yxqCl%3vVMiVnS_rlMm@v*h-1soA_68Y1tJCE@zEIYrV zyeg@(vaGB$H7zMMCE1yilbv5skmYhZbCSwzIc~px%$5*X5s0@R(|qnZre7Zz+%{16 zo7eAM<@4Ab`u#>AV07H>uzP%d=bAN6zZR!e5OT0`vfZQSW6L<%QvZ-S@@V>jo$mgh-m8c$df;fyNjGD^O;;#<}y~D#5uC_DRgC3 zez=$vxEm!nQ<76fD=DHAt<0vc8dR5^o8ffoIac}SbWu%4MpkyF-|P2#gUk1>`lMh? 
zNnwRY_t+PrlCFv>OiwTHdvgp^`y^IZ--t*~^g2c-DU91!9mYH=v1B!p^l zxTA`C%nC`ImD0^S3R;n4+N`=w$m!|!>F&fn{f5u0btL$7zt=Rq!8f%6_4<)ER=rL( z%;Id2ZyKxf2lVjTTs}xJ77i#m z6$iW)2eh%g+W5V@IQpiS@ZI|2ajFs|lq>0lgHBhfddVp$E3K?3&(F!J2)|iwn5Lm+ zPg_2?cR^A?ZROSVfBS9akf9a7{aFS7er6i_Ltb<6?4kKXbB)BA)wPQ%uCD1t&@GdRnT*{QUf!Y^TeamYPbAqEIN{GHu#|g=@#y zIVp1N+67~-o!n2;{GLm_rY*j{dh}Eq|E7+vo)WN`-b+1xP3t%LT4U60vx*iK&s;Mq zcf`!W9-r4=H)zuA>9c1~8a&E0e4fEGN93X(C6Ce%?NYUoS5&=sPIu}btq|X{;!KW` zemc?*(5tE{m6W2OY^SrJAiuJzYJh&hWy*0;Jm`|_)aLXqPVyvp%;e%k`x}x%{lzB{wGsTrtJopnWv4=b0cKMS6W}?gHPILu41C22&C;vBv6x-~B^76~i z(hIg=g|u%=E7#K+ENyLNcc8QeVbj-PQxYeOM7TuJQqxkCU8b&RB$b(8K=tM*3Y}PX z?`Uz%N3LD^z}g|>vN*L-)AagTcUTM>zGiO8nBs-E)!f+kqUwzP{nMw{lTUzr9#yuJ zbF#&DCW!68n3RerA%ga@cDWYa&TW|83#iH*;teo7&F?M6JS2KeMCo@?zmO<>1`g;mz5WH$DE0b9BdX+zKgo(eAF-}U zG;#rwcGk`LCB1jFu4%SWYn7mlls3d$I?Lq>ey9v&w`z;e=$qZwFW04aO3vYpSj)5Ys1<~}r%AJV1}J#Fcbh z;ihJ_C8xG%!P<*#(DI^t=S{e7LY}7ij4KSo?#`|rFlL(APqPxzWK z^0t}9OG;+kJhEWexD1ca?M^A`Gk(ULOJ+^%JEkJj^j3(-SIt~gCv~OdQ;dPHSZ2=N zQ8jC&OU+v8+gZzI-Jc=liKOpjm38zv!Vv5Jt*poow+89KV_Jh)iG-|>CBdyzJyHcJ zVdc*E`n3Ynm?v`1GtA&exEo%p3WKb{lH`uns{Vb!6KGkF%cIS`NG@{HQe09NZO(R$C$TiH4_VY;;YM~FQ7FwwEx}6pFA$`Psq9l#A z;7SXMSc@bBM0r?*=(tIht{x68apV)*YR_fL?xp${Z6|PCDOaI`^8sQY< zpEx{Hu_M;qpJH^hxPE7H;trqH$!5HQ$;i#bqeB^@27hKYF08!! 
zv|y~buMW*{>MJBcG%j8%HI3?#;S^bx+Qc)pWcY+**R{AzYGe#D7z zk1f*V=6cON_N4eFZofU)q~$yO?kf_KY+pppTH`SXPO-}od_VXXr_19D*fdYDL+OZl zfDFkK9Sv0y?o_c(OBS)I^x3hpB!nBLBK$(S|6$UkBOJt-q5z96`e0S^8lDRazYK9m z=c(i-sV@C3X30O3yOAO`eJ6WSV(y18*8TJJF?>&*c&`sdx)Mve3YRlvdaJ6!4GXP> zTpE`v(qUg`nEhN1x6kmEn+DQ<*KL;%Kh=f{flT9Ayv^`b`TX`Itz1&thp5k9$FP5y z#zkl>R7;T+>ZVyQpJsaf4?E)hMP|V9 zuw*V)GGGt>NW0P=h*-%#1y6eXevjr2P9b}%URINPXD>2M3l}?Nk_)s5JuYTEto8Mi z9R;d4eYRjjm>PataHRG(k)`8i{_5IagVQ6mw_4R+ovNn~Sw|MGkJxlh#j(<#x1{MM_RThfdr_cN zQB3g$FPYe&8em#s&jRf^DT}4mm~xIAF>f%K}FAGVPSGHn>eoQm;+gVD%a|Va)7$DnZX0S99CK;xQ$2*9oRP?@j%l9EAQoi)GMn z(&d91v570IDoY7^M4#QOs}Ci^t(Xj|I%ROF*Z;_r)gwo)p7Ka+7Rs$yR=Rl7#LEZK z;hj*K=!`gD<#g(|u?XlDCKnWlF$`-rG{SVc;BN{6guJt9tq05mN8Z5NOV(Z_L+J_Y zgxpZtF?yV)YkqgId8ANWD;y;hrvmKLMuUT9V$Hv>c3NQbWW&sJ*F7lIi0A*bY-WoU&se%t-E)!Z&qx5uJYdAmflC% zh)%vjIZ7U&<1En85S_7nVHr#-4<;3lf`U1nURjqlp?21_r5D{}R9zzB7>fV-QLWS!hU`7`ot%dUR+bRu$0m&IaO)rhf4dTDsAUmNN7p{xDW)ZM)=M4uRfbtvHn6luiP!7B+vRon(#)|_gZ0Wg zNJG+bw6mVd!bW31VH=wxLh$1#=8MubYC$oYFDP%INR2E^??lE&+tjoA@T`eEtACeg zl|_cRk*7w3N9y?wum2i({xvTuM>!~eU%XoKx?09ZT9)6N=QW>;=J=dRfA9$z@wCvjNSSH1(6HZt7nUkD5#ts)wPkbe8K*{v z^<&0d%<$CfAA2{Q^<~-*Rp)Wl^eI!Ohu>dzdkwj&rz=^Ot6V<}jhL-;>7%XmkJ&dZ zQg*$6JGC#JE_iqXHubuCwm!25CTY7ry?rrSGj;2d5ZKZ$8K5I({xk?A+Aswih zgP5Y9r3qK(xQzDn;JR$5WBKaN$|=9BEU*P^b!wQO3*7TKiK4*G3 zA`{Vr%oSLY?qCmhv;^WCSm~VqsY)PRdw^Qx70;l+CbJ(BtBiWp$u4oHwO;Xw_7Ab9 zbIpMMw?-Ou!bm%r$K&CS~?()Tpe)91=JUo4XYQF`x^3dZW zPnS26T}rZ*jq=(hzt;l^wp-`|=k~PCm}d(FD&tLk{tR2sxo)3!i=G%5&Gj2@u*b(I z`1Q3myXNsF2WzxEcS3?Y_=&b5+2_&hwvM~~3Gp7SKKPs`9^EKu6sJ~&ICVzprfd3k z)-H+8taRayb^jJ--lOTl5-a@!PGFPLCZfyxt#&ITH^`6_)n{6CENM$p9*7#E6u^j_ z0*_DEcE4})d6xT3$0Ogj2Yi=%eD?Qt>$=~4wa>Kw@@ICFfuzl8)5d6(PN&D`b8X-5 z^7%YYXYk*_m$*QZbI%^94>_bAOUvwQ*^dgFn$zl1t4^0qdVej$Fgb_R=e~r&=~GXV z^IVTl(+`Vj+N1qiR}XdtKh<(1Hx@4h7wy4Fp&gZr(k3x3<%K@4w9`2qZ+UU_ZqcRO z%}m|$XsT$^O8tNlf+n5{D;XjqEm5QfIU~^lG5=cF1dcEmSZND_e^VB3r`L2lg1^^f z$mL~397wclZ0v8qrAiFOx_c|@+7L6Y zFDuwP^hDwuL!yzDDr?roWyR=*+r&$Wj7}>|oe>TrF3T3AepEhl@Tvt5_AATn?{x>J 
zc)eQBvgy?$=9Kp^jcF9BWP~SYXx7r2>fZf~qOrx}W|+w-{RX7>@cWGT0mT`a<>Ly1 zZ;R0{NX$;TG-Je|lz2~JdTM5XGFsAohSY0S8TWCLgp|E=x>NsrMt7#8(Y7n^B| zRi5^7zhPN{9$vd0Jr##FnV#R%u(=!;Gu;-}rQvaB^y%H(C5mbc>+_Smx{}nq-fqq1 zNJn{5?I!g!T#i`%V(T;`w|9DuH8DFlM$OT3pNQz0lR0Db{BA@ajiY6L6Ybj;!5Ps9 z{!Ta^*Hs1OGWnRAW;wZ`NWY4eS(YRvS!u6;$-P*O;Hd!?m$ohjyklFY$HgZjlX@e_OX_&!7X zhcrdmg*OjbjMOHpb~GAqVhEmIPAKMIAjS!DL^{)ykh3pSXf`QvRb@Wh2& zy~JuvW0u$7b5dnReMKK8&h$Rz_4A4crOMzmbx?6(X)j4kD>Di+^WE{P&oTOl zj$Z%ktar?iBfC#=lA(qifz*_EGuaw)q|9D^!G-QKWZua0j5h}PS6_NQeWcb@>7=EjBvESW~l=Pbu#s z^y*zPrMz+yESx2jngR+dsJ>-`cApG<7)Yv zmsxmCeoZEf%c#l?Q&40qM#kRsh_!LkqZU*o%*TIFl8FhG%8U4KMAKs8uraYUhDGHS zq2O9_vP5^X>#wh_yl@MI@c|4vO{%rRE-0_ulW3y_MBR=Ds^-fbyUT9RkXcM zbrdRo8FtUAt;XuIwe@v?Q5b|Vv2sPTWo**sK%5M)RS9SM{ zy@R5uZ$zGPXo|Fq66VK6lQo>!74j`qI|zrAWBbjhHubJPMT~OiFz;nfnypONwG*bV z7-Aa1=XIsvG!*QctrqdrS2JI3O^5wCq-hzc74=EEZIO}TnS5PH-|ncolPU)pj*LE` z#a+5TqE zp?R3e9zAo0$$F{&a3N3e7cUo$``lB71AFAp&(0->K0w(l z%UZo3c7wW?3f(FV*w&&MiOjk94b?eBUUYtgDK75cjGg4ypE~JAbRyD5tTLZbf@#bU zvF6CT`D&16XxrsOu+T8|MW!)cKJW*v*YWJPbhoTmiq*P2Ir=2D7x_MN@*S;KaX@O$ zsqgb)JJ@Lyr}<)1KYvM5KP9y!CUyTWB=r!f>AKh|>j@1{OX~Yd7V)&K)V+_88aMn* zowl3O=c~C}SKMDY!BFl#$}@WCVympX4?QRCE#u6~x#V$U1P8r6oGTF^slRw4yzW)86RiHd?4?6IrUvZ!s=5{Ygs-bkJ~kx$mbfSdw( zMll(wSmzFAmS@+I1Lcw&n9&`kGRYjaKvoONbFC4_-z-MRID~(7TD_`!D|P?tB7?9z z9Y2H7ekaC7VmHK{V`fdnD659fIQ@5q-&`nmZ_@#T<(~n+=hbLq4?gt^x5sDL4g2e- z3>Par-T!*SilU4rx=N1Jz~7a08(GAmn6LLi7hU9tu17dq4%H%%% z<7ENU{j%x{R3mx*?;$Zh-sN!{KEw6mbE>6L?zp!pcQ-9fl~0i^Y$L>1wJd@*Oj(4N z`L>JR%g4S!Wht~rcve0qp-BsTYnDN3@tF+CfpS`@8xKkQAz$sT$h|K863xSRG+6!vj&?671OJvcWMco z=WU;oo@w$nY`e1i6lnVV!ohPD`{`TRfIeLE`@r1^rBLF4H^67AVrE;gV?x3%$QC* zF*P^{Ete>qx?-rzDA%m0uU}Ck?}o0JT2U=NZFNQH)flpTOk#QB*kwb8EE~(4G0TT6 z89gd6C@^Yt=tVSR-WcfO&$~wi$Ej4Y)(l}Kjas!CS zJ&2CW8AXI*l#wA>xb=&%&pK;Z1_26GRG>>0s4!e;xK=cEg;IU(iuw@?(=+B)hXxf| zuff^9@}h%^jOtlc)$=m?EEpN7sv7Z-YqUW-do!3D+$&mVC3*S9)`%`Z>d2RoFx8L!^!2 zv)}v8Xqc_2EDg~Kd(bgk5jFMPDW$ce0rB#OG 
zmC?ZxBhOBwC9!kejh6m`e3L87rYQMrgpSIpr=*z%5eH^o3W=`}ap+N66p1(>)Z4>XUh*e+WT~!QHLlmo5V7w}=^kdcf_gy8eRDz6;#-tu-msEOw zks@+0JwLLs4nS*0f5W;Nc}k!oc6~=^_l=5KvGFRhzQa{JtCC=Dz%#|}bokQ?GYTrw zh7TPwF?Zm?v6WLL_-lG5I^1?|ue^->!lde&(NnT2t}y!a>t}lXy2INeAu~OtSBlSH zSX)?d(SY8CMFFqhZZp#2vw9}6H#!B~qXR_VpFw={=)rZs7Ngvl;b)K;60M~$u z1W}dI^8!>=H`gAG!FJWm{v6=~>pVm=Aw4?vjW>wC#Oto5y+?g7%V8kQ;F6_0%yURT z8yXRytLR6y$RkW*OjGGqvu0g6FeNv+U|@3Z>II{!{if+J7&da`@IoK%cy?8FZE0z( zS_|>-;a7~Po--)7D&J?8*5&6;7@lVuEb$mIVBiST7{qG&!6ikN!$t-xEf0~|UG1lM zxm2<85F;WXSEmMLxi*h@{W5#!omM<&6dk>3jLgWy;iFAiQJMT<)2ygWN_JYuVdaI# zlfw_tLy4NVrNFUSSWPz4ZaZy2>|Z;*P)&SHAwR>sQ?k1=Jy&JhZ^m#IY!U`~LQaJ{Ti9*=7^Q>{l)CEzZlIE$J``U4_zNlG+0|3}2}ji^t9e@suK+ zTS9V|B`FqzSR-Z|cF7>TGp~O|`LK|eMz!^xYE&&7tMwF>1)J`utJad#TV-&IatejZ zR$gKg7)2`&<0WMiI;Z0qU3>Q3ct%sAsp1*QGl+{WjHZU4jAx|0sIJfbOhvb>)Hfne zIHWw+VMM}2549mZgy&l&pm#pCbe(I}-Tj7V#mn?+@2Rz2HRUzj>Agc!s^OVb-R2AOF5-UXlhPIi7Dt^B+_$%#FCu_Uyj^H38z_aNgZ7Do^z`33s zS+K%ZfXM1{d9&Fx9p7f1%0!>3ExwWk=B2FWzH+hV_f7Qp^>5n(<|u!H?MfDwv$8yr z|6jE6u0X&Qd_kKOmP0yPtv0y9D`RU*9$`R>#VsvMX{KxXeooR6jali!BkTSKe(52a z9(~SUe$hm9nTjscDepkbmdWH%%+wJRN0vd8Wp-(d-0)!FO#yRF&YTjD-)4X6=Qf+) zGsow%tz2pM`eu-3`{nQLq|NvG?6J+@d~6&Q>1etZ5fQIcnS|IF>GcXTt?o$mOdDf5z}RVt*!Phs z3h64}e)FpqWta#vq%d=nwVp^kgiRM*m<{sfj>+dL4>RZcWIe?BJ)#t~4XCr+v8?%z zrR)q1zg$W7_&IoDO%p^^8e;TBWMHo3zsYb81;z?eEBOy8%z&qs^JbXmqU>3Up=rex zYfozf^w;q}u&hb65V5SGXO6taZf~&^Dsm872=-8M#jx^Pdmtep-fs+Wc{Qz1 zWq$Q=!!y9`^^`a>dKYFSC23C&=vy$*<#7hQ9*@iI?c1Ze?Avay*KGunM&$HzIg-3s zJ0ux^BvnX~ApR)A8SmG15-I}*dEhv-h~MP#70axT(K{1P(5dR05j z(1-0{mYT^F-(Jpfunuxqu4wjesf6H@Pp-TtvvywXlRwQZ?KNb`PYENa?a1Q{^OrYV zR&;S~&ea*E=_S?VEBUa$g?#9Q;%<4eij`>jL>I=J29K7ja8V4{;b!ZlfPahe@y8rQV3=}Zo1PnJ~B++MKq<# zLq2*$Bi+@6JlH05jL9ulN83#AuT^ff^0&+5H6rZn`n0xwN{OE3Oe@iSxJAZ1ROw1~ z>sdt%Evn%L#Q+)DWt)0RY}P0-q1T+DH?FL?d7e{v;Vi349XdRXF;r?x8q~e*e;^pcq3y-xAw+e zcl<2ODR%P1Qf4lX|2FD-TboM@GfTm(PPt?uVb+Km?OblVtDN@Z`S?B za?@{rW{#Sto%4*1LA1wh)5dFqnPKr6?qG{%7p(-_HG^p+kHZywH~50h?J+qDCMkH_ zp!AwA*yNDa$a##(<}kt>(>cwT$Sfb7 
z>+yQLeG7Z^&m3GHFsi*~$)G-kskwQE$LGl#U7k@jr@VL7pv1(Aky#@aRUo_6##3lL z-{Wyzs^_-_O&?YO}2 zc0s3O0%TUOYjvK8+QfU5G|bS#%nv!`Q{?G|Jl{c{Y-_c-v>B82HDXSPghm?}Mn-3Z z|548&myIXTm{FV#gS5YkUTDe%cisBbRb@JhM<%7%90O+FIm2p9t0TK&+4)P`f&U?m zaXtc{@qhWO_FNJTaK?>SGKr4l`~>WDY!48rE@g<6QgpS+0yf2-mtfV8UeJL#4duo@ zMV>xW#WVv3_ zUDBZP();BmzG^o;3t4ci|Ep(7`9-DI22a{d_hOIV{+8Oe)3d5rdmhFznz9EgsxBU! z_N>@sO#Ij((vs#by~T*tv2J z70}9EwZO0*rOqOSi|UcWDW++VT~R%>@@nlv$)(dNB{5kB{}`4-k&tqUMR^SgdWuz5`G+~L+-Knm zono|bA9?)@V=$*g5^kq}*H6EFwG@zjWjfRSbr{)BWyB-GDC{SP0-4>eI?Gx|%Bkv2 zne;rXf6mD%i(2m|4bv(odxHLfVb^U}Nw!xp3DL=phcZVR%g$e`G_C5-_Bm1%M#hlJ zQqr46DAHLHWw;}fgzHr6t=iuWH{VOinO6*chX3)()fF|>mF#=8yy4IpI1x$kPfj4c z5(KNme1+P9>Qp;WwYX9~Uyau?)QX@Ml;@W#OQ{T`6%JDq?lB88l_`o@W2()ZanD4V zd!KaojLEl7aN@NrU=5IGRPDI>9nyez)Q_tj1xh8ne-l2GcE@^UE9k63X3A<2){sFQ-G3P7I>YBPw9j!rjU9m@qtb6& zsg6P#IwEOczg}wOGQ@8T_xgO^;fDV&iK*V}uT^D!x4hJunAJ-*w#f*l#r8Cs9T>-H zgx0Fq{EDap9VTd*@B#lcJY{ZeNr^qSB16|7xagXT@}a=jLt{Z)um?3q{zX?`H1XQ; zxi*fm$h+v;iNzdW!9NbNC>kvXS@6y%7D?5)2dpt zMlXqs15+wW-AR3WHd4Db#jqcsnE){fY^XWboQZ0^uapHU?AD>8f?UxclCjH!oTeW;*Ykf2g*3$|TWPF0mz8%0G7@eRvp zrPiU79Nd_1b&-ni(wJR%I8|8{tY(|mYvzdTX1mX$Z)S4Ec7h#Dy`~aqUzFw;nGsdn2PsRxSNxSqf7j$BaRylii@U<@IS#)=7 zOL(O5r2dAC#^5e9K$gvmoc|-NK|$ybMr*wSN%jxzhJUVU+JAR#=Ddli9#%a15__ll z9%S2cz^||KCIyUOoVIA$0%nN}Gr503aEw1d9c7-m$(}$RnQ>D?-cvL#lV>%X=+54+ zJp8u(oRzAj>!o?M*M8-pxVS{&NYDQ|D(CDs{9GB8d&GxQ{$Z7mdq$8WIZUwynTw)-aikE-6Px~;c}2YS9ZiB9+^n;;w2)3Hg~1XjEI zfpbh%#JCyf8bga$j9vaE@`Wv9q%(;Oz|JL@#chn0>WRXRPnF;6GECRw61wpkDsSk@Y*U#mMTwho#5VgQrBm4}aKYs7r;OF8~t3KG)3mIwgdEG{kVoYYIpo1+< z#S~22vkO$W`=aPhEB2bAJpWR>jF+<`F(Rv93!i;~a~9DWDvFH4Oz{L1J3F_(NIgc} z#Lh!|8J30Oq#eHxRbkh|5xcLvv(HIXwPyPn8j-oSu=CM5Ke5f7WwPBT{ljUc2nj8y zP%uOiDfE}rCUh$rZ26&)YEmaPhBUB6F_~P?p#kYFI?JJ77NxE#cc<`1HzkgZbdYNW z?MF^P_JqzGt*8iDg{am4ulqtZhKYnk2Ykgdkt6N?0?tN;rK}6lZ%044JL1Qj|BM56 z&^B?#foHBS-uC2E9Hk&2!=lZ=~hJ}Yh{zWLcB9xGn2qHx3@8Sy-;5upgxp+vJx_Oiw~lusv} zfJ>qv7g&ed7sT95@9$G{%s%I>!#{fcGf@q-d7hXTvSar3Cq!2CretN#K_YQhy39pb 
z=?$F5Dx)?B&Y=~3CDv$_ojD8nzBOm@EWh_BWtFI`?CwyzTs7-ggW{p&lFWqDZ)$Ic7$vl$JAXX%1(T!_Doj;u(slvyIl zF8xs4^{Q-Ex|G>UZ-~1(CjG~e^v^=~s|c>rDJvU0>(rK~(&^<@`d;;%(7Ad<1!gOL zvCl2IoTuR*sU_ zYNKY=^z`BBb5~znmY<><{MGf-dZ@Qq~z9{ z3I_}s1tzI}Vra>-45R^=ASDLmR$zNC7thR#O^lH!O@tNk$a+!^W>-)Eg^ zJa0YQ%4)F{$*nu{fRge;X2O-&zckFOTr8a5`(xD@!Ub4k8jG(sjXxVE@4A#l#i7te zs4Px>^S`&ipE}l{8A?Og}^kUvhHIvQa&&=GU<^yP7>?g%z3c)pb47kt+4f_lV)h9>4*y zbJi@C2nQ^>nX~2;Ug=2&NUrW)S@C4Z@!b7IP1DgjMGLoIB!RvL!7z(5E>OCu_*Gyeh)H}C-wpM+mkujhw1sOdV zJp!3&SvfwVpU2-jJs~qWBZC2wr+3wW-X)`R^C}Y(#`P;5%Q9zcuV@*%?mCxLaMmPs zn#@vP;-Sdw!Oqc=IN@k==u=+sklk;X9*r4K7_qviw#LPbVlZDmYPUIp_c>*6l~^vd z8&JDhap0MH;Xv2ay;H(+$YBGh6+>-$DucK@_U9ju|O#9G7lqpUP?wq0b5mKIq$TqS99 z4WHjNGr{wdpLh~xaDE#Hk9do|?=byYoC5dVd>?^zZMWN-5d2*0k>GU)`*M#*yT%o4 z_5{!bN5B2bBP%RK|FVX*3j@>=PSv`?0#t}?i0qx;qxQ+FKYOBv|9RC^tQCwe(LSi( z&<;c_!8=Nul3xb-q1A9{I}Jr+x|KhO<-zuU7|kGdaip*(v?GnDZ&P+ga^sXGK7UK) zCSIr`CbzK9+{r*Uc>Rw_R@&R*8vj}`#oA{PSx(Ah;(B7~b9t7UnUAgm>Sks>W)bog z!h_7@Ua9`gtTUd>YI55v2?Ya(_nY;OEJdE`^V)v;il6xqe|%nF?WC+awo^CN#ZyM) zyrXRktw&xbWwr7be%?#jocc4Eb{l7y)aje>lV#Q5(9mdK#aU&!U?ei6W^UvMYorcs zGBdA^#>PDexicJk=byK_hN*b_u$x)CGu`W-yLy=X)V&tCVBado`v zW5=+e?1`$Sqx17eFRgk)${71e%hKxV>dZ|4=uvfb{!DZ9D3L!EC9-aux@l8&BjoQu zpS-HEAND0QNVaBatl6lLZzC+!9x}|H69&v*qE6W<9yC~D)RMPpJe8U<=kj04`=1o% z^vZfl-U(NfEjjamzJz>WxQc~HW?_SpaP|zgc8WQ`HSz$O?}zHI+V(BJLD+-@*Jux@ zEJ(yz2wSXuRxV*)OEF?Ih-4aWN9=_Bf#1stB>t)`IA=dUl#djJwtD6PTM>B>T{~|{ zA{m9JyYyF>l}Au@u2gK6=PB9QB}9+Xa>II5W-nYlQ9V*P#WIZ;;-j6^==XB=cDHJz zc;d?8O`*!u>8kOvAil`SR#mqJWAK3fr89VF*dj&q5}QN;`||WW z@$(e1RX-G7K^5(%&MxUM2$QD{o?b0oRzVUOU}+~VqnM*dmJaA&Hd3-2ec63e*ai?V zMkL#u1FxR1T)r3qtQxZwB7s^9&{?+;>k}%ZD!AfCWD;K0BJJ$os+~>zf8Bj~fFo6% ze_rLRs-%+cq?1l3=}tPGr0*-8q&uCy?{m(co*8Ba#u)}=E?{6#4naY8QCL*K#S>R> z1w=rU1#cWL)&q4}QCU|+P&`M$bHz!1pYN-xbkfr^1MKgg-%p3?s+X$wzVG|q`@Zk} zFqY@tD=A;64RoZdy)}sw%~UpA-C20kV7joi@ZkkjQDX&(;!|`bGjw4Gs01eln=sZ0 zOz&w52;uaP@6==x1Nr0l;4Gu=pu~AeGHth9b`>SadgCRFujwKhy&#GvcTb{gP`I0z 
zu)BqU?xyZalVn>@AthaVq@xGL`l#)|Wu3EeQMT9;wf&>uj=+3bM*HhxHmfYgXW%Ho zK&$^OH~|~iz^z1asvc4$cK4HY8>-_q^B}WljUXu2^+=TK>`5eZI!w7_JmrLcLm zyJ&&E3)zoGx3`m4&q3Lbh|CLM>_SM!O}|g@Aiy_864`wtJk!Oq{Uam&aNyu;9(MRD zVA>51vLChxKI^DWoitV@-KVxjYWOE(Bzw*KwRAa{rSE@aB^V#65lFHxO^u+&+y5t* z3$$6A|J0ciB^YTwaH{7iwYbB>PDdeaezdqLAYhfn9Zp<(&OP^nz2791Nu<63f z55wp0$lhJ;tuT?i+q`pr^OCw7+7UfvdcgINMP#mbN1CGK3t|EPFpTk=)b-@+c?{jPM%v*0jTuAwC*_aC$+* zM7;hz6cZ;HNN9OaZPQfbpemJ`&Qa=oP-jCh4fw*!Igm|R=%QyOo7M8K>H4pltR|2o z(PR@Ii=^&S<d2csMEO;BO?IIV>KFwDx$utA#MLq9BX*?vcjYO{rbkIo> zI{u2F$WLiOk=N~T)%P?7WH^=tBCg9!E*#NXj7U2#y54s=%}C zMoWqTTJ<$-QU6=~b2b-ZjT+g?OG$^}=B02sGS7BciGR zPA!s`AiucLm+M(n>7009Or|-$?pASdI*~lRZh(_7z6)@c10c0>^SAkGlO2j6M~SZB zdNa3ww{kfUs!>Jy%p6_kzEq|GvRS{0RS9jUz&6{SAqjdTT-EEn_F8m?6mzcy0G0SEN^Od}**s9zZZju1_9P!$cr zlhok7Xj4+4Pry}gUVt}(rcYKGzv4te%UZAZ@?N98Q}mi}uJ=_9J7xb<)Bxu(&Zx6Q z*|0`8%k&OtwGFtYY|bKa7o{uz<=eVVC-^dNd!^}}i|hO4q1$kvlkKV9=@Lt^>Oy&vN(9!U9=XUnYatPi&glJ zVKDzVuoMH9a(rxhP8u7&FP~ZDM<9nbC*OSv;lFIOPOS$}PpotT8lFLs*eOI%>Z?-S z@lMh>81+et&5&MYACMiv4Lw`W=NgEP_TEX(uPV}CKul=a;-W|oR#h!+=gNo=G$w-4 z`v`3`f!G4w{fyottc_vOB5OG~X<$u#=2`A6l;sZ3svey5J+P<(tM~CNe;lh^Z=&%0 zzr`M`Sb@*jEp{u^I}Z8&XheZcb4`^BunOzT0;F=>of4~na`kBd_i@d>T1uLIva%rp zet~+V<*rAM-^P2m+A44#q$I)6sOs=E%dZ-A%KZW`V#HysWS~Yz(R~C8xk1@AT|28g zd@Y=2jf4AV*B|NwbFv`#JEOt4*P^H2t?l3PkstH?#x+9aHAlrdX$}c@U$+2MF!QPm z$N8@q6WRLuEM+)W#{+%R#^V{o_joT<-9MFIs`j;)`bzs%QaFZERongRLo229u3GvG z&wZqFXM`=%kpf#tBWBcjHh7!Hh{|MW#%dfUF=y6O$S*zVqIKM3@gM}(yPFfZ8)?YD z`Z996f#!SY3tWD4vh;G+*oe&^tg%v}H)~DMZyOmyZ&lqS^;XqQ{+jFd)l%xMTKWu+ z(OIp>{26~&)dteL|DfW4=Upc(K0^6a8QBfV5`G`Yxt2i-GwFx?S4E8$X1wUTlYK*>#fh#Mu0#$f5?DAxF23;Vi5G39Ty*hL%xX<~;= zI8h}JVaA>69)wK=yGBKSQ@zz8Ps&PFPB9545$AL&<`X9@R-CD#$Zt(?o8SHd3#u0JI#E zY}KM-DR0pKMt_}4%KsZj(?bNIm(351t;^;=C)wF|HQuXFqBByoy3yv>wKi|&ZBC%g zKD6nhyo)r^D7$mXaBy}+Emv)rT)+JUj<%Y6oZ5Zj&DVi@&e1Jb41*zT9-re)UvX9b zdmOZWy5`||1UR(z0LM2~I9LPVU{r2+3~)dcE7i?WYdh(4&S*jAmFeNcpQx_FAMwp^ z_^f=qfvC~v%+IMUpr-Glg~xddpN2orYcUJj!D5`JV>eljUE?b3zDq|gwo&v*lpS{a 
zsc{RAnrcRi-$jdnqfYA)c#8g2Ff1VnTw@sFHtoN`EO57k4;F?s5CVpQLIc_s<23o?(^13?d{V1LB4xv z@SyMi$+7)8*2>3o2w9DespCoe!3vy~=rSKqLhP@wY9f4cJdMs_MDzIS_Z*W?Vz_`w z7h$ria2+zw^poEeE1#V!*kl(^%z^Pzqz`O8+KM35l6b2r9Y}6G-d=p51MReKaXnk*_CYK5jB9hzpcQ#FI?eKEoV7uv-1v+HdWm1%u znQUe_s7OPSBXiN#iSy^Wo$MRTA^kgjc!XfoU;&Ij(qL3+DXrfw;K@ZXCM7FX9SQYV z9(Bcjd+U)bNW%y76LW{O`MiHA4>tLNAB@kb>k;Sw^@G;Y%H#r($~34|5zXPFK)na` zqo`sis=QVsNCMkVAw!rwq2YafXymUnn?6+AlmCXOz)@3?#*oi%i%VjUGmieufeq9$ z-zc7J9j?!JLp}9`j3dAp2xt$_Mg=Gc9`yKWZ6+rRKOo9fxX$opzzNDk@>A(d=9Gz( zy(`f}ZgV*Cddy_>4*MXx8tc?fbXA{GJmCTca@7U|XJgucMmep4_Hi7$j!S?CYdeA5 zIk2J2XLKQJ3#KBTtLu!9xWx+Xo~B9mxyR)S zN&0?o4*6EGmisQ4z2HW{Xt!-~o387H1AbO=8V6menKq0(_LiRx?whHsWG~q$9kw%X z1o932h3f<(+Jx_!8@-~rQw6t`1m)b+n)UjM!gf4v{a1%OViTE9ZjW>$kotk;3p0X4 zhDL==%(2I3>zXZ+P+@P9Wu$*=oloB;JFu$rd=9V=jV~~ay56Qt98jzt?Lb8dmMw}r zvo8Yz*68&nyxzN2K)m*ja2FQo1Irhsi-7!wUPyR<%E2+Z0FJ}}&O)jToPU`BI4V_x zjj5mfm)+p6%W*);V0CjKg0@0mbgK6;rc@9@{LV%ogxTdUZrqd=f!1=UWIFM*980ehM%Lr? z0%`YapjKBj0%ag%k8aXI-SkNU_5J}B)PY~A_2NtWb|NH`2#N5T|S(&B4r{<@o^Wj!C8wu-%o7UgxSi=oC>wRw)Bx+K2bZ+Up z!0fPHDcgjrtrkpFhg^{7hxy6v2PgN9I`c2sadtdmD1gDE-6%kjr8@OUGJSb?eIyx> zB#=myL%$v}Ux>{*|g zCVqd%Jg5Fz`&h@n(J0BG!lwY&L}5hO0JxkqzgJVk%$as9zEtLk%Dqi;;fT5V-mrh5 z=NB|Sg|qf|G)^ved;zrWZ&Z-CNXjQqw_kzUXwU%4;~KT`bKZkYm_G%S;1|~5Xw~h7 z;Q*P5IT)oJ@;kR)3XY+KaZMiJ`xNbhYE5Y2_Zl9)qEXbp3U{G}D4lT;^I{BEwgu2r ztfiHW!j0SwiIW7jBmGOrfrnYqviRD;rI|xmBw+A_v&J7}xzQTVrR83?hjnaDGaDyy zFN|C;n_vU`#~IiOY_Li19x4!AUj)1_a(EvQK5BUFN#z#d z^HV36CWOziucLQXVePN<-^;(I{Zeg9{Tk91Ch#GSec;Be@8Trb6^SjFIQS8en{pLS z*+Dp-y4BN3WFS82!!!Z@y!Fx?_X3%iyRP0_A&9cs?2x&NYMoJ*O_iE}lRCJOwsv*+ z`YY`XL&FJT4zTgVPR2eH8y-#AE7Y+hxB%~8wb^oRf#d~ns*Lvc1bdPR_{GAW#8oi# zV%OXXZY)4jqZT2Z=2@?KFf=w!I-n0;Nqpmk1&U6QhaGl$YVU9ubP<%Xeq`^&U`Tg8Ffu$q8|t9cJi9v+7#R&_c274qPw&nIM@Isg-LuU{mS#fzxxm!& z@>C$#ADUSrTJFZ>9}!cb&uR#(FxP=WROvX!E~}Cqw~3Y3W*lhx6N}x-qv61sST03D zTJ%^xX2zOQ^DiK}9^LF09-|S{XpF{f^6*ksXSHJUfvu361TbJQYm2TAAPsX@D%u$^ z3zgzF9H&}8va5g7!A!$==RYxShXfBQ`W^4@nHi{U>kV~=9FpDEvSBFRdrq!n-Aw%L 
z4q26HC>nuIg6`Gutm-n=^O;Jt1DTA@6KZfEZKrkydS-IpSiP1nNo-2j=B7sbL03id zC-VG!AoejjG4*5ifwNOnS4#Dnk+6s)1A(xkFO|YFY?hqn`_%*za!ypt4>EJxmXYz@ zPzK{X8(b*QO@#8haR@XpGE9?*HR`W<6D1K?49Bc5$vBUlB^=cUC1sl(Ov-i;wXc0k zk*)!QXQ{BVcUPbQi5nn~U4w}VNr*-{UigzRhSpRMIYc4zHdI__iKDd5npENXfJAuC zYz-ORTaIFWImDf^(SASaeICMkE=y>a^<+I#xFf=aw$8>t@#GATI$|Do*2DixnUq4A86oL zY2@%kUACvm{*oDI%$TggF{8j@!&{Q07mXjO&9*dWYiqO3Em;f>BMDnD0iGIl9`|J2 zt0xK>hu;C0gBS8&CyA2Vtwtgrv8gIY`8Gn*wE2Ca@E$vP%Ji- zP_L0pq%ISUX6ouPQaHQVnO*YvraQCC-CfJs&gp9Ja&~BU=KMf+eY7hO=!({Z8yjKO z&B0Jhb#+T9)C`g!a19UnNAOXh^-N9}mPEUeMu(rxOBu$<(cG|~qk`Vx(Ob#;A>iM~2O(+T{oKPC0_T6F4}#uO(&0GHPXWDQU49jQ~PXJlmW>Hxye>fu?2P&~8PmbYY7on!zCRXf zsP@>;!fBNC%!M=CSLkZj@_2mginZt}(6)I^x=L$i8(=&QUG4Urg04=krmNT6s%**a z6OCt}tDnb;Y8cwOCS6^?2&Dke>FMe*(AD3oXLGZM?GCf~xJeCp2X>IRslLJUJ}H%ILzMx@5)Q6@l}q$ zgFTyE$JWnU~a}WSo~!L?(UZ!B@!jfVPOCqmnS*8R2aKW?tgOI|IA~FvIrF3ZRb8$rA=i|ob;fn$W%v6I))DJW_kXvMQ;9bH-- zpt8mt{0)vnUdIkzN0;Cz!m&1_DZLKBL9{a=tga)a)q$B~oZ@frQQ)mC@m5l>bkZF4 z;EgbUgO34!gJ!E$NI~Cv4>*QsVFO!l_!RI>Izs4KPRiv>!j{f0c)${WYHX1bkIha^ z&5?)ex-oAxG`a!{)Ya8sF$uE&{+V1&BAA<<%>@%RxtWE^pwl1q`QmY(FY0#&D>1|d zdtr;=NklDUbbQd*t#AXRx~uR+-|5X*D=i+2(R!ZbX!UyU@jl&*NS%PK!m?oX<{$T> zj$mO68wTu+Y8{{uoCjQyqsUp2J6u}ea7hZmZm*>D&(!-N0E>b}vbDOI5F2P1oUR+} zxQ-eL+MH5rPouZeXLp%pt2@*Y8t$l1CL`WDYOxyF$^mMu1KvR*ytjCb;+t(w z>wq`v#lIUD?(sHx8w8jAA=&EvG)kJhHyor<37~(ys28)vSCXx#7HH(jdceAp%95mx zjMj#FZV)l%mU|k~^%Xc6$<$<(%~pF=RduMQ+U>U4QrM}XT9-cU^}fv_25TBJ;mF|q z6@faZB$|wHh7WqG0;1@WZk3%3o$WV^IFqclH;P4!lTA0UaljNHojzfm>nlp-9#t+N ziGap{ry6^(alFyx6StUMwl*7Z+#4QfOpG=K!{J~c9KJu4X{>1q%NJOX>eM8g#{=o6 zcyHZI$ma_NeZCMd;Q@wg&?=}Z@Cz^0@(~3d&heP`-B{&)%4;(2N5JJhRaV=t3BPqX zg#1sU6J7>kKt6)`NsxmoYOtS5nkmAbDe1=p9mA1ouge|mZ|}&4DqUV*WVo{`*5E~E z;VN%!t+%Q+bboDkZ4IQU24A2f6zT~0qO!f&j|Ar9RhR|%f7Pw-%1Sr=NBH!jX9F5k z1QVB7xSXX+SgO2I3dTX;(9~Bq*aRFlk2QLHwoSmSqM`F19}M{jL|+J$=>NgJ$k4?uH#{Y5qrHeUYQ-tX#knz24B&Lh9X820jMF9A8Sn~4?QOBY4jLo(YQZa;4-JcL$nAxvV%ttc%xCUc_1N>Mpy z7w-Wk$%kF_5r@6P@C3BY{2XW+iP9F0tDcvsalL8njv@5xY=Y)&h%z5qxyL9a0ohMb 
zcLhoEC&RY7_6Zy+T2mZ$#pxl%#vTb??qyK3yGt4YB>={R{WvFudH}AXC$#>`@;qP1E0yzFMz}Je}T+1|6GJ@P(05@ap9POQi zwqwxqew~`ir)9@6MdF2c5q0MZvxo|^Xk%0ANQ*i%TK{OS)0lmf>fpu_t|HwUI`by;84y$C24|U3V?3g`12TNwJ|CD+ zeTl--7KB)g8JWq=ZfY^REzFV=!M;|A6E>S*YTt&sY>Gmk zZ0|7P+X=A(?5&u?i*9p^Nw6}bacRkj76puZ2YBo2gqv{s(E`UJF$@}^D!Eg~AK^b5 z<>!Gw+@u(#kRa_&|{kyIz-fgBI_IS^x zKZjjLPnJHq3OAW6eOD#y;z5VhE8B15*Ml|(3e>9|Cqg-H)7Eg2QZ2$F;5KWbEz&4@Fy8oR8>+G?`?{o58yCgPXxWzVX$ zkPWxg$CB)*>?TDq=I;^gwh_fCu+2t?ZL`yYN=)#ML?!eWGT~oXli{d3KUY(K7&q)V z)ES*-P1?;yiM~m(-ohu@!!$Qn)9_DHlHLvcn_lf^gN$!LC;z}c#Lm^`GpAU(N(w6l zG!RZ0iu4DVEa)#T*iZFuEW%l!vT7ZFKV2O+uBt=*M)>q;z7gOJQPwWLNfjOw9zawj z`bRt*Ef*bD?nW-m5yZU?WDX)&dEPQ@?*~7MZ;0a~@rchku95cf3HX`XOsn7Dnn9Kj zdp-VVcRo{{K5J}jd)nu%hXrt9ZhjH$Zat|RATMz4L9LMk0i!IIC5K4Wek($_Q9e9P7r7~o)oSLftzmQh>KF^8ih zdJu1`;Sp(<$+TiV4#apT3Wv$}WMIXQNiU%)BqX!=J;0Wrc zC|=T5k-ZS8A=<_v}!@`WrD3=+H7!jJY^;L{u^U#D#I^wj#+QIXa!8DFPu^FYW0k$bu3e3)K*07955rn%cWo zAj95*qxk^YbbDWg$ypr&WUjuBsr@rC5MzL5Y}_(2xUs^~Hk_CQXdGNXHU`M10U6>B zX-stfghoAD%tXW)9C&8JSMCU1F%U+s`W-;_f-E_3@$s!eU3DvU@r`69JQZjrgLwB! 
zHAtW6kWm*t7rx5R_fe$3!;yx3yIu}A{Hem**bj}}#6O;X#(r^Z&uBP2x(Aj$#n!CO z*k)`om!4_H-h~#vrnYd}8T;rdW-K!GW9|zrr<$?Eo&U$FxbuEdB#zg078i6B=Nv-Z z%eZrV&EC$xhMWWVR&n>3J*!IoN9Cc5HNLRwxr~!m<(w5+pou-t-T{tKKYL7FSR{XN zS%OTEe5n;n(3)}BtSCuQ5^9*sng6+7fDB1gz@FDc4Rp-zDZM9tt`qIurnd(vZdG4S z?j2Z!a&hK`RnP4CQrWHao~RT02lT;eR@-A{*#yJCbj>xA zP0s(;Bw6$yEXZGDw@Gh&qhzzso6N^-PO(9B+K!o#c6>~6NmpGZxs)-{ewv?XjCuk0 zXbIf35vf_hXz}w%YYsEb5Sk!s{Oqezt+Z^J03t|1*?12){P`D)aB9U`$G~yFOhC)} zKqC!ovlxf^Ot@yQSAjFM0KBk1wS~;-yS?x#x<{DrU{A0^m~ULN#Off|nBvOA+9-Qw zZoAAp@MzoyH(?Kx9M3u=!3`hFt+48P7%opZrAO_GVtxlqUMQqOSIX(x1<@Ep^3HA;fCqQ4;R;Y#ig>=f95Xn*k3*E_i2x^m>Ue)wqf zj`?ShZbDM3TWi@}JO<l{0Zkt9Q67-)i6NE@8NA7ZXl*AP`P>;>O+aG+r*(bCYl zePm#{6B^C8+iPWG<%}pZr$1bI0dnD;BiYTF#&buaqrneGlaX{N&>m=yhP&i|)OFTC z>qN%aHXCnW=xkqTZyxFK-Q*eRtdPa3?%s6u0jtd>Sxo6@;=G!g54d|`;Wj_p?5vIX z>KhYLXDulMVYzr8dkGpDIMUFptkAUDoP{s?>+vv^c3Wdp#4dI0nw;I;#lZ0` zO0vc5iP5j=6n`f# znyTA3CObxJk*rR%`<%_SzK~BT`;kq`wUA3fPRV7H!rrR-TBi%M2R4190XT3&^R8iG z#UBT*qI9YkIQ}Rd2??13)KN_Z#DxvgFK~2lEtBeD$Sok+B7@7Jh21vyraisAd)_ps z|IO`NY;9fKm+L=op|y44y#C~D0uD9C#B4G<+W_C_E=e}pBH>WjL5?*J+(c|f8O09P zFe{bTL^V#euX}BO|7+K^>wkAQPqx{DL0j8ob1YRu$^AT4HStPYO^vNGUIQN9VVC^r zn88%m_X+g`23^)u1)J5qPf%1Atsz=IWoL4e0SR^-OzdCvLn4J0Nh`Iu3fh_j-d~{JR&jFS2{Eo7nk$PXd70mCE#HrM9TY zT4!46-!%QJZ*H!~f9$5Fsn*uXrbb++n%46&loM?jFLW9nGG3!g_PQ)YWPz)DM>siE zBvY7fUiyiucbtOu;bG%|x)YO@r1U%Es^_z5OT%H|r5E;JASzre%0CV*d93saP?5s# z!@xhtSCVQg-9FXbN!_A=e}eRw>^MU8AK^{tx2W?iJS^ch0{u3AtDV9PizE9C>W9=! 
z^_OJ3I!#L}J2esh?@J1o3svdxh$0Qk%EhFby;!}H1KEuZ9M7cOftF^I=g@ z7_%tSW}r-3hlrB=O9>D-X~N8 z=g{(VE{zzY9w0bVe!8t# z;m<4U|a``r8u7g(joW$(W~Q8tKcip-wxG>~eOwc3z?kQD+Clret@8?2x3!$k2E^ zJTVw)lq84j>h5V0$>9()C2PnEbwVBHEXF895NAmGa+FH zzWVyUC~k=MG+-Qd8N<-*)67q$pcK<48s7jp8=x`|D+41SylZ8+w8D|L~?D@ld<(9v-^@4B2OnYH{&}xaniz0(PcJcPK<Vl36#J1&}BBAmygljn_3?n!qp=f>8BhqE06 zAR=}LLJ>x;&cC0!w-PO`-%DNF2a}WC9&fCvuDPitB4g0%M*5=3<#cMS-Q$fnhg-X< zV{W9k6}z@&m{pMiL^cAFa%kliD-POj;oYExN&S?gwe|@ji5NNJW-u|!_XxMn&EW9V zc>;D`y+XM!ue)S;_>y(E+(9khp~${>WQVdt?@%OwP$wyAhYaHFcIiHFT`7Z7STzF*98qG1qnwm;hpag`d+3_tY}6X9&3hi;iYs)z-LC; z*^1NheGxG@9Yf=O|8SNQC&ux7yO3$_K6f^X<8q;`$lpl;OY{rQ7;#vCCsn$Tw;|FS zi}ptO5itBPnCgn@&@qrMl z|H?5b98);2jYW5wHN+JxL)|0VJnwyjM%~S*^b=;x^^AI$(0>% zHGT3GITge;*WzdpeJig*?unV3p>fA{^?pnYcxzm$AMhOB;?3!!vsq zf;{Yf$-Z7DaGUm>Mk}jrjWzn?QISd3acJ5)=dG1BHI>m?9o%)J11iXJkj9CuH#jDjRFTk=n8wkjYRzs}-j|Lobd;j&Z$z#Ti8V zed(;>MAslGpW?{Q69IVygy)HsN1*&(KAHI?59b15AI$>183W-|&oR_nh3EFE+cjGF z{LaMEs(#J9>Y1{XmBaCCT3qZ}y5_H1xyNwp$92s@nGa5m4GJ!tDuP5H*uEPOGyXMX z04aS2oa7yF2-qRdZEy|*=V{Ms(hWR1?oJ}>e%bLfo#nI*XF0v+y*SGWR1H%?k$G?= zdoSTIe=f%)zgMK+vs(m&BVdU0$62M-ZWoOfA%EcalCp*JlA)W37Qc}QIc{-*O+mA) z!UJcSTMXldh%hW!O_7@^ z@-g6HF3DJ}H{1Y2J=lyJZUo!XYqQ;OgUyB$u&p=VV0BN>4gG);6lg!Z4R7NaA7U&s zhCcQyAbJmYAN;6)BaG|0 zn!qd;N4n%r(?xELv)NozIcEqyL_r% za4In9o2=lZ0L44`b)#yYpDHQZpjzsO4KDV5;cU`KX|tqEgKow?h1q8{TP&Mx$XxV? 
zKgbHKl6Di@;8YZ6{(h6)B};Qor{&H&tWISf2VfYVm)y>39Ekor+=I~n2wHeVZ2|K< z5mWa@&<^5~dF8rof!%4coAUQj#cU&Piw(Lg!t;_x@hiMiS)5l~mOJjG_Cy)uhd3kl z6X9xj#kQWR7i8^3AN1*@4=&gmr~JYTG8}Hrl9_D`h4S|yvYrp?`E)%di&>}3eBc01 zL(IZC$M`{KO{m6oyU_%9sN%u_4A?i>f9No%{lt=6tCy=M6#KkcvgGd%h1dp*1RJj+ zzxX1Moac$T*m*Rz$4tiCU1-<&K_hZPW|ay8$mPt6@+v4mwV_|U3hNhsr+hq%bFIj@ z7v~pImDLD2At|>|i?=}KNYPigy;+g|oBpyl&=rY!|C#svWH;w+kIj;Z=SZ@( ztFER>AIEzXxNo5jk*+zXzhS$;Qrc*s~QOw_UL#uZhv_=&Puuhblb0RQ$@xC|*H+2tA>}O-q_shU%p}`tyXqp{fRvuC&lX!N1H_A_skK4F&Fm#) zf`gAdW>PA}H@Fg##E?Wa|7AW&`6Rj7Aw!S_E8&nop?Y@p3R8$){vpK@!7H?AOeC{E zDa^p368ALaly*x_5l(p0YeZ+v=CUp=St@M8l*1NX1ibeDl_YkrNt+oPz zud+m&2)V71dVAygTo1o*X-KRefW0RrJF1&%grW3?4j%H}(R$*`=}jG=0Ehw-t`=@V zrXDSV3v-@_w_(8{;|wQt(OATokzFR?DYM5jp+s?f`Ipe0@sM8Jpxq~)!qETo)Q9GAu>OE=qU3h?erg%5+i*^&<|7>C1l&E?N?MfZECbktw1B zC;Geyg8-f%Q=}P(OGG{^mKH1QGgJoSS^iJ93VXf1!j}INe@2fG4JCZqxPv%3X#J4m zL!(Y~yp3-3Lgy%u_`5g?<%3Wo4BH*-=$CL9_G47-n}28m8*ODz>Pbh1`)oLvDk(co zgt4h3BM@X|I&n72{c%?GU_MeY6`O;0iTpGvCl3U*9zC0v#|PyYt6x0apSy70-&|3> zqhGY*s845=wPiY0XA=4kF2=k3Lb|sv1w&bNm(+h?cIH6VE|2eNwUTig%+bO**DQ%1 zar!|2o@jr}X1DcBP4(apCMz&7hEYhvs~S8HjlznErP}pM3e-VPfks;5NNi(?;>1^^ z7nL26o?xgatU#XygdlTqWvaQc*XNKi48jMgEdSd?rnWYdU~c{dE1x1Y)kKX*fb>I5 z1)RCqT=<*tbHqH6S49RsjEHwauC`UtSJ<#rtsG5cXtB7B4aX;xMF&9?IrB@0v^cJm z7RSZzRbz_gCW|peemg9dHDNuZDF2pIzd`mC*d@ zr+$MByt1ENz~3;=6{EVU{Hvn6$`Qb#WVa)9C;iUH#A)#@~*=6r)2>u zBd6d>i9RGU3Ap)s{N}riX0!1wv)|L~skVHC9zJ5J_EcSa?R7qhHCh~2tHZ(?iM-~2 zpg!e)Ao(u&-S2)!GQNu(%eg;Y9+I-eh$3G=Gtx?pXstkya$ zJg3#F)KpZ|D8+gUZx+mj9|&71;=@=H@o_FsW+j-_j7tWg@V)$tI9cQqcn`VbtR-)j z-ovEPqW}9B%I(zXJ*d`z{DA9G!^crWLio7h&5&6w!o#11cU)l@Sy#R*?KLc)#rnW}00W14tuXbW?s7aF)N5yj>k8)^uHo>N^<$qGHbn1j zFe*~8Ao_bG;vaqYq)=sO6V6{f zve=6Np-`obEGyh3R26=H@_T2Mzh^AGJ^#Gn4h^5cMrnLdDF#Zh^cr5r^ZeSyv5|jX zZTEF59tMg~9A{OAQO2Pyt=Ys=gs~#D2A^OyT%gX^()Zd{&sQC5GI@iRjuyYyxq_&Q z?MuCO$tT%$tncw|t1z1@wo#7_?+-Go`0%6Z(^NT=@#MH z0{MCl3i>m$=;3(?&kyn6f5!DR9~SITO7X~KPzLT3M z!w(CO)Az)OXm&NNcN;~~ji9H^TIYVcs&9RTsNoNwm=@tT9JXws!SENuCOrR^!-kaK 
z>`Jz2Ee#a3ec&RwVz}Udeh*I#+*iG_@DwWue&P@b9%&-^NANtAdal?%fh3G$;nguRTw#~B3_F_vO?`4Nwu&spo z2zS6a>Nb4Ac$nbAj4G>XGrh`VI_PVDgCcyh7x3&dUZagOEDW50aYt^rH;4|W?MkzI zzr!hRG`oc37VADURB!Sgvkftc%&-GrFR5Q+4eIisHp&NFW^n0_{ciJ>Hm6-U?y{Np zAOzZK-e{Uu``8))Kx1g2NORVdh;%gwWW>twbQIuGZuOYG04qQ?vS!Hybsk2<` z{+a*;bZr6wgtQT8Tf>+CkF$P*dYUWpT~#YUt@{p#K~d6%PC- zJlE0C&_VyD_%q4~i-W>1*=5Fq>PT^exhg&Ih!0h=;7%I|b71b8Xb^b;CAr-y$^N)M zpg-C~yFVuUvZ4~sm{w;z>aD1BIc40{gOT8~mtAh$SUP)k8IjLk;DH@NVx zKxh8zRCH#&N@up(B%uTOa?9upTGZ(b$?gcB%cOZvU|1tR2676`WRhw)I8)}(s$8~8i^&unY_wF_qAlG&VwKf4r_JhcJC#Zt+2kJ_oKvkYuf}8Kt?5{F%(Mh z8*n!m8+BjOeP+wn{YeysBT=O&FBQp$uPZl}%!72Y4=!vr?94OM0>X<*EKiWjOIwmY?^dwearO^}Isq{qUXihbEU!rYQ z*n?xPX&vD5{|7~Y992MU_-)1jva)=u@ThSCG@{gV + + + + + + \ No newline at end of file diff --git a/img/I.svg b/img/I.svg new file mode 100644 index 0000000..18b634c --- /dev/null +++ b/img/I.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/img/Y_Combinator_Logo.png b/img/Y_Combinator_Logo.png new file mode 100644 index 0000000000000000000000000000000000000000..360448ef8eea4ccc2768aedd471e6f6f9543d77c GIT binary patch literal 10270 zcmY+K2Ut_Vv*=GkLJ6Q0=}MC-RjN`W2!e>9K%`ftHz@*A5)ecw0#XHmAksV1MG`;| z1*8`NsZx~Qk&xsb|M$N8-sAi7adxvayE{8)H#@(1X?XV*J%SSf006zN&dvJ(00AE% z05v7}&(f#Z@n6aJ{w)ol^83Z#V1dS4$IKT1Xqo?gAVB6@cCdk&i?OMnss0@$2QLpv zyN6!(k0b*3`VKV>_vevzM!#EJKnH^L<(;fN+J@fO} zuQ&RaY=fY3o4rLt$b0<|!MhQ#8&emE`5qDPw2~Bbee0Z)<){ab61X2HL$wUB{R?vX zF;LyiLim)Df5_L`hd*EY6B2(-zI^mI@%-1cgEYM(0+-K`>i6m6CtsJhmR{RHx0Cd~ z(>bZh?khi+eIKx~ly1#J*)F&kMBg4BnI}A>gMoe1#qVqEy2>B;@_c+2&+8uA!@TWa z%`Qpv+3o3MN7LFBnvKW8oY|EDN)w`$IeN@b3gud|8;`3Ao(oPbmAn2~@wYC#Yb5{N z8`*s774Z9FsnL|tfWGS1+m2s9O6-cb-KdX`AC|k{Qc(T5o_5P>QcnI;$?ulc;7=2y zftijuS^Cpk$^}0=>#JFxJBSQirHQCQt_Q~^4VuMvyVk#CjeJ&!kB8}CQ(zhgXS*%J zU$WXmE=qc0af+SNDvvwPv*wW-4P9d}_fe{}TF~dnNknJMQ4l+!|9K!^idrk=%nNQu z|1%egk%)qQyUm-zkZmPFB32xs@2$ z9{>E&ZUa1VfUy9@;Yu=KRV5 zXEy_5l0-Jgej82#XqsS3Er2I4NeIFuq6ldLmMd-2Ucs1P8@yQ6uU{}}uwwy32-Fmo 
zk_ZW*GY!A(#HfpB!k6McKf^@A6B+0gD31k@&;mjHRm?C-f>s;)lSTlB>`++aPUiz> zsvU=!{=)14_(8PG72a4ettBJv>F}xNLJRxXKsmTDk`_D9@_8c(nFF&FQ@=`?xEi0q z3L0P+R0CLH79X(t>Og~-bf_^Z%#P-uNSAOT15k-JJll|s28%A#O{g|TVC`h!2nA9 zCsygVCT!O&2pW$&+{_9%w%@HCxCol{mo7#UnIrdxmFB`*O#NPs;%SZ~vRy3h$K~)F z49IQ8T3$Y24N3=<9}x&&WRiyBGby_Ig?zQ40*O?RvR@v#25}JfEZ&5V()3Kc;TlZU zp>HLTd}YyZ_`?^NbmAbUzoloycRWD@bn*`l!K+F5YMFLIB0$qe4~jEOVu)82%@(b| z*{#GN38I)x%5jy9vc8?Re-eU&VF+wtxcvF&S@2oV-EB=_mV&wLu!|_6KpwC=dj=h9 z;DOSG%dvfMa4D?+tqdP96C3n1au_MrkNMQ)Dw#_*_+JTnr#5~S|B5!8;htx>QCj< zwSmbE^WU~;-+ylI38jG`*|0OI{liHZJ>IL7^VYw-z=8QNdZ&hH#ySxp{*W19`AQg1 z!LBiCMVG3s6PrMnD&U%9NpGN3`>Br3P$vEdHO89XSSe0T;Lt#O;C6DS(2r_f)hcT? zm-zt|On+M6+Luz@wZ@y~YMBn|9W0MKnHS|(?2O$mnu@?$>gfdwQ{sT?VQlV|wI`iP zpDN7@uG+L}(H5+HiYttwRPHQ1b05iiTHY>U@o6h4FOoSACagjY4c5{$anWpkmVDo0 zK34IS1Lmqb1}{@5monJ5ikA+#_b{X$<|U_fcQHc)|8Bj-z!YeJwse=?2hMXovd4{j zotDSVc}GX;%hf&!_zq~)lUsupd7j=83fmztNT%oeml;0J3VdF7ODHCqpX$Cv!_DsP zle*CXGa5ZxX!8I+UAwP)8*E_XwUHZHKVssUPBCh#HnW+psX`olwczBw#w(pH^Q8mH zJAxirvF}M~7B^RNbM)F>*(zVp3o92iFXVPvgqVrLd~{ppUt&{(yhE*(c^z;&$1&7wnlxgT;rfz{i^rLf zi)Twwa0<6(V6S9icRvrS0ZT4c^Wkuq>TJ1A`}{iBCi+}luG`!tf07!N=CfxO@0)jn ztNAZDjFp$Tr^cpvv;=Da>`93dpZ2;nmJYgCb$23keD_tE+PrzMlx1dk$QqfM@Ba8b zX&rkNKD35*=t><9{Blb`rHzMP@g9rda)|`pa>%kSNAR3F{y<$yvS@lwT=V0E?WI@r z01A7A%mMzDYL{4GnS z{PC%I_xyD~_o^-(H^wJk*UK@D=wb~N#ufbn$s}w)hDVtKYkf9!Zfs-%{t|<@eM|i$Em%wV6G5Qk-GDD60ufv*1d7~ z-kL)U&@X?^SCr13UUN+PG&&G!bkoax)c{iOe_yCUO`ziEF_K_&Ycm&`O`ll}h`_~J z1J27xm_m9hQ*GC+WNIJ|kU!o>BY|VT9yGtMhWg!UcwY~o7D^@x!hQ%2H<3QE-%<6T z+}y6+-cf$?=}dK9T47(2KUNCRiyKQxwW5NV88rXfSDk&f{+9nvSGM-(!0a^=i(piZ ztA?mcv^xTvv?uy;@Qu!a$UEN`xV3aNqBtp)64Wm{Y?FOUa?y4vCFa_8MG=kux6f3S z1+MMH1=6N#W~@>3HhRJ=7hjHRi%N5~6%Un^aBO^{s>BPywmuAXp)ieS8gcNm+bXPv z8e({7^2GbchttxV3qm~+@T8+{O$Wlq@fWy59<964(k`D~wRmS8x%+>0OG9b^lgTp! 
z+Vd%q8C>sC$n~!t_wYv}m>-M%eiBdl4Q(F7bExr%YbXUVq_R-t)Oz&2wz;IK>(Uec z>z$#91oedW$!LL-U5Lu@7v#Gj5}RNz`2(|estO#x(fYQ`xi7Hqmqhbjb8sgEM$L7g zadbyR8S!5e%u6J2)F`}EtdkIDS`R{ciacgYz_HZwj+4wX%;wO-A!K@+3)eSqk5YxA zi;}*dj?^c|0Z4-E>d)8@g~mT(vF|DF>PdD0qc$Dqt=N)!+7U0hp<#L*6W{FN&as*A zhy$Cq2yS9s$i-c@&vPHU`g(V|cQ&d9bZ?rtb&o@yiT8ubfz4`R69Vp1n|dAYkoduwJo)7~Kt-fNA|U2%5WF7! z=eaUR`%zE}kbLL{3t|%tnj|TsvTP@icqZ{_(OXe*pDV z7(XC3@8lqcFCTwWR<1e2pL2f`4V^-#x{^b*Nm&V%(bI~eQQ z=?YNzLL7buL$%~n%Fxr&wGaJn^HTJ~c6hfkBJXu^VL8otQSNV^b!6Y+CgR8-iP(1< zYyQyNnQAs`=Td7NTx-W~kHx}u{9By_Gps+)#q22hQ-=KR?QU5VQWM%L_r7^~_981n ztKLxgmJ?Oj&0qBOSG1k2krsrx*Q2C2Xa3cOn>eln;t@%wb);!txJs??p}fRpU?ZWG zsS2fpE$WR*uX;V+Q1$Yh@f(TtRzN<;|D}6q8;6RUrLP#}h;=vv{%?m@kAnpPjhG_8 z6ZvAc$GuN`PW1}uh{v{BDzlUWV27XUst}oYA_K)cPJD76@STeXdhBekeEukMmhC2R zZSUcz-3LVY$-Ot31M$=9E`>w()sQzLflIlC#MN6J;<3QswSL9-%?{ZxLiJ1lO$ZsY zPW)K^xo%<_ip)Z_XFZszrN64u)72YgVX-rusDG?s(GITNj(%m5^)#Z1vFbY~Tnq?8 z@g26Dl8oxmX{)4~`GaGR#FZ)aG;b1O=a&Ocg|lw@OFV!fb;Oo79~uC?Q8z~CPDA^c zKGWU!dD;M#mYQ#1tcJ!Np*b#-8@t=k<-`xgsip)1fxAm3c0zi6kIs9vUEPz&R86!b zv=%;s0k1axhK*EthZv4VX|!0f(?M@RxN%CI!hpqFI6+J5G~|!T?urzA1ob?Uy?`0gn9yj2 zxf>oNNdciV4$`Z{`z%mBwC75rb>J2IIF1Up&!t{7MKV?ZzlL6&b(**&oEp+94yG9x<$z&fo)#Ii|pWen3=TNd$9kO$G9ADJCC~5=4_U=i=SzfEs=QdP!zCP*YP)iUv__3O5KW(u4v?O#A=0fl@6v2G_2X?# z0wGwzzI}krVe|a@%Qkdo_pp39OYc;voU{a{vZodXSYK-D$n+;K48)SxDDLr_o#ML1 zOIZ>JNs2e8Y@$r2?v{-wxso5`L~Y}&q1nV48s z+A!`DnL2Vk1J*l#81ee<%e|R$*h($DKeJ{CSDei%w6RxaJ8YjfnDaUwURO`vWdA#G zgRqqBM&cGf9{Xz~ueQ%)Ot@8c$Nd68bMZRaPz)$6&#WU=(lfOoe0kSRG2I_f_(v0Z zODzGm;(%DYSy+iaOZmQMe6u#o`{Ppk{Uv4|CJ#TfetuVHbUKV}wTWO$L5`XmhQr3j4r`wm2bL{=J>^{2jN9eCj{2rTC9ikqx zGHvQD@0VOXNq z;RXw!BwZ*5XGQ^SEwXj=K^djQ0UO4j*L!<#q6W-UJXNPtM?8UJY-xnWSoIS4*THXU zr;4?uR29P)g*I*lKR$>xd(VWqGHbIcfn$}TZ}(aFnRVJbV`%{?CC&vhPLVGqZQEq= zGz*#4sq4U^pV&8V;~d%tG^iCbNksWqTQyetUNB55kTz(e0Gt45ZVK=_tGuyQ{`Le&|2I+X{$T8 z75DB@J=AZoLP3g~Hn8YwZx>fJi)}qwVvsa`Jiz~1?(V*SUjn`yMB}gIcV!YK+NcSfbLt?1$pR^R8|7&D*n`wjKeN^!#UAQ)QbJLYygLaa`PM0U$jm?!uK 
z7Vj^Z3e44AbJGLgk7~y@9Lvh}M-w#+(%1aylEZf14HWJ8=A0uvZuWxtosgL&@jG5N z?aGw;yR21-tZD+oGpC6?kyaup;aW40%VP5myIhe}w(wETjL&s!OZ2&kH71P*S69`q zz#Yu>h_Rsr{bfs1mZwLQl3 zYCj$*x9Giv+Ez|cQ0^QsPe4vfGe2)eX_#4tm!R(GSnN@t)nRIXAqbHR1wcqv`Mea91!Nbi6^r|5e<4d6x)jEZcvGSLLW=ZZIpXw7^HJk0z zS&_4mMg>$!ZiUoU5iW7d9nwkp60stvtK0*$)hPV>DJi_%0a9~>Tc9usA*cL}2}N5G z=Dmn^DqcG21fMK1s3*OUI2dl|GZ8BDw=s;?b#D`Eq7d3k66Rl?+&0CiF=pVibZ+9r z-}+>M8^uiY(StJB2cE1f9{gLfS4_Z)E451HiJlo(Gi3nbngqsj*HM=)`?D#;RI(M) z8+B|O^S;TW^Y~&zTY=HEhR- zj}HVtp09EW(?n!c1JC^oW*s>peu`BtD-&{!vl8te5^i0RjAda+_%%9!yUD3Ye$yjH zm4u2BhH4YGyM56oB#%vXiO!z}#dy3q6?*Q;hog3jIGURo@UIo7usA>jm3AeQE2G<$ zzi}L`mua!3>2$BZ;_07nn-dlq0tw9*&9tIlZ23*GtE<;0oX?C%_KK^PoVz|pCeS6WQ~+T@o=H;dn)b)$Yg z`Q8{Qcgu!gFi!}Zp0Mxlq%04`KSjHeJr7oYDOq8zx2tvP_iN4x1=OjV-&}fr{N*zFM<|-SyE#ZQ-3mThW?#ZjQK>!5$s3Lio~jr9 z4W&y*r++MA!Q9*Ha?0NixzrmKptbP96tYWz{$aDa5+Bfx2-MA$?6Zdp)TyuhcKSJb zibvc!BtFw@T5byULXN{Yyb@txVC=PLTHP4J)Q`Iyqc5Gu(=iqF(NM(0e(lBSICi%y ziN%}dp`K9%=FP*fa$?oC4MFkPlL5WCZI`z6sE1-)#gI@`(Hb7LeZuZ-8{p8vL5*E1 z{TiYGIu_}}qI%oq+nPMv1wWZk_=%g=gEg3-5hL(tQrULyJwjP3bhFBh^Q_{$&Q6>w zdPYpIt!OpTiSj1Cm_lAfKC}V{`5FDSkPma$p-t?hPLgd_#0TmdW#U`(1TIv8ITf6^ ztxvTST4Ddzz&+zIdgO2nMQ~$Kyu;rSr+)TjXVQz99XQu6&+ss0OZ{?jgr&~Zvy4pZ z#1In|uBpf1257RVei9U-H=nejviQmp2iPz_JwLv%)UO7OpPN0fLVw9kJB{Z_?1qai zjffuNjW=<^)?2X&d7qyago<(ZsX`i`aX~PpJ_7@xcgu1p>Y>$+YWQ!~XkoR?mj+H; z6>rB8h7ioP*_+i&1F@>52i*I`wyU&0fB;j#s%y9{RUN=$9sxa+;4u(&DdhaUOGfMRMM7 zLP!fu&OE`vq<;69zaI*|!;5gNz$}X`tl|bv2&Xyft?6`2v{HIY9yq>w+S!ZI`f@LG zyhcVvaa?WZ!VBh(mm+d=+Uc)jr@w%X*bE*1DcJ!1-g?*kn8;#XTq9|9a4v6bpla@9 zKf^DO-RRs-)Ew6=)n>xovURwO3%W^kKsEeFESW3*-qW)tC#sF8>rt$a6sV!T8QzeM zu8v=691zH8j3OVO>$ka*-l$#wg>n>dzwFcR`z3P+b|*wBySzcW65QcL9qqRAhx~%D@Wr_D&ej< z9DQ%-L`*<~D>@F?3m@JXGp#NIBe0Gs??hS3*;#sCg+1{8?Y~n`zLi;~F0wD#L`M<% zp>K2h1x&sig>=Y^`*|9h#Q4I&*Rxx+;}Vlh+D(1)=p(xZen)mmH7b{#8XKznQOe6l zkC&j%AFzLInp%8h-q*kHjjAzp-f!H!mlQg5SSQamAmWbS_Ohg&n=@skF&J$2ip~WC z)gp2MDgqUsxVkK;A^0l76Tsxm=w)URgGgXhUUE?E_ZvmQ><$pF9e0KSs`%eIwJUcn 
zj=@j#+)lq3AVSKG4<>Be2RPU@L9dNaeEa4@Is+g}wUlblfQ+WMb(OLf?UzRjkdiY7 z^DWH9cwn7fWg$9CE3b$ydSqp%(fj1fay%0UX z0Cu-6#=3ZH4YfJ{EssY|zBe@8>Rk9|xcalp+30VcJc>|2U$}xV1N}Y2J^^MJB~f0Z zPCLh9N#NK$Pxlp!5ZU0ikHvEcbHeR*V?zF?9%a#dEN;d@Muw!cd!9??kzRT?*z5}^vKei$8D!G9nE>& zhoT0`KQJ`csB3;+laTwek@6m4)iTRB`7I;h>+3dK?**Nr831pY(bbldZOFRqo7u3q zqc5g&zAZDyxOn_--$ZD(_t|wO82>%BHzr^JNnCqRM>$`O!`-v_Rs>t_)(-@AaKi=r zP4Z;^uz7HLV3FN7DMvvcE`8|MH6AO>Q2CdymJVud0hDR9bA*XNG_MfVoBq`OgMZFQzB9WW)iY!~$ zQZkhze95jqz>*@bv-F<9raEo)qD?I;;5vSh6JlF%WB0?r#U}z-+Nir< zd=_rC@sYSg`3K`H!(W95t8f_n^-v{d$W4TrbSR|}GzPu7JfM}Y4VnOyQrr{9vdrLm z=!%2?EDpT&IWR^xF&7EHy8_YItXEgNH$Ig=cdUGV3YV%T+ASggDLY^W;f?RNJBQjx zoJid-pF4D?d2#`j(dw9__Hv_0hvU4%gQZ0;yw9B0>brg4VFNFgdmDzRuK|7#%YE#m zmfhBCSl#U*UADgD@vu1f#qP3BzYp@<#6P4ty0fwBy^}|G{)QsW6K)WsET8)o6F+u} zg;6&qjKD?0vJZ1KH(MQc#&+}0U;mNYeJ)p7q&;UlrtjI&`RjXg!GdwxJ(0Aw=%!&i zxtI=1A?nCL8*CW4YP=|*C{5EeAct9S>od<~9b}n_Vm+IGZZ&WKYm{M-{?v$zC{`H6 zsc;J)r89SbPXcl_%sFiDZyM?U%8(V3H-=g{K{+{IJBw-d^LQz+LlakNG;^W0(r;;V zT@7R*v^EIGY8&VUfXF27b{3f<;>Kqti2xcPOvMZkv5AG5H=U6H_RE93H2gbMCh!#Z zdrKQIb+IkbEcjZzlMI{W6*5cN=s5zJQ@I=#Zdhx+)p*UF0n`=}Vh%)A@jE8M_AbwV zI{+!=oJR%|$gvn{nyj-K0fqcZzv|UyVGuo0{R^hRF}EonD5d20h=Z^TBWO#wcBIW( z2gitUxWl%3b+PNl^FFLu1u-;O9GBT3EATW&1FN+8%qYCfXu?R(m2e_Qi|h(#q%qRm zZ<+$fb^8I?GqphiNTow5rBsfJgV5HU;YSn8wX}-+8}Z^G>m!MHx4o&iW3HBXG5~-b7kl<)>`!}umCbR*LvBwTp zJ#o-U$%*F+&lgvBuh(eH(tHI;DR88^e{R~6^7S?5YzpXjwPyd_766PTUZeWEC=siT zm!oWW@yEW;^S`62<#FdY@5FwJ#+=(n}dno%Afnog80`U3$3FWggWQXLuibn0lBuCT_y*h?zD6SFdcSfu129dpjaNmP3`vK z^jwfDf&iPOczJxteUN+I#b-^^UoV_XW42Y_!c7m@{?c)1*^IN_sxad4wlgPZ8zii-tl|S$MYU} zyS#X2<4qoK5`=%_$>RqsP>p5HRAeAHNN{Qxgopu)b4 z4w0nk$gdo3Zq?fL02{OZxS~)W;c?l${i(BKT<%dAow_&__0^5u4edpXv3rC$=3s)1 z1dWjgT7DG1mRDPfhe8mTd&LEPK@Ww$$5p;(G+8(Jx#*;2WU!k7uICl-K87)tE{!;S zcV+Cm4=NQ*S8#zC*9%t9qzZsw9sCX?HK_uhHIk80)PvAs!$%*A1#GQ=^&?+8j7J3Q z%1wOz9%y_?WYo3lVgW;4@E2ZnsM-RwX*ln7q^a}2?W_f0DRpN}{pW$+keS%eS|H_* 
z#}^+IG{CWK;fLpIS>Duu2wgn<-fuk*TBP83v5o}on_v)lyTb-2;^SnXycSGxpp*J&1|CBB=Ikq3Fdbg5aL_j;XuA2q7uf{#IqmwD3@@ghaFB<^ed~F2ZrPCuB>udE z+!ThKAqc#}4d@s@SSUu-n3*#b3qdBnfe3o5%az~0L>Jo%g8p1qWYBu1;N|d27!xaU zVvr6Ac02qp1HcZ_w9>AVp7I9VG%~coya;XJ4s91Rs2*I`At=V!dmz1bNBjb~)4oP_ zz_Ax}Iu@juM7Y9Vfxd;QHIePrgl5y23VIPjI-uSKnyUhVY2b@#Of#u=4$NEuZ8(g- z9eq#Y;=Vp;pI4xaUn_ei?6wx38nECu2mX_aau?c=U{MBUBb1{JzrXun4EElTAlwbS zhyoXr6HJ1!)yt{|Qxy~#{-vcX3xK&I%i{ccpzUmFOxZ9FKpHie(832D|F(UN(pC!f zf-3elG*tl1LU}#*UE@JE#vh_&0dUXJnjcbj>^+FZaKMax@fl>kbohg^Q?sd3#%fQJ z92-)f!j0RQ9?`1@HQ}=*1cCuX^-^f~vOst#)4yCB^;L)|(@ZiFPGbldR}Y&#WU7XV z0OB-Ty4VQVlmVn}1I%E#!7VOBlah(K1p7Se7jPN4Ms4~>$!S5ymJ>3)I?alSp%A-N z7_~wJ`oVn$X;<`ZT~4N_d`fMk-|oEMFrwapI?#g442O@UknN)gXWIvz%}+1k%=zn` zZ`=Nk>4$89MKyL3bK zzA822QlyLV?LsA&mm-NVD8`KWPVaB6-}?Ra+iN}h`Rvbgp8c%zti9LzoO77v>#nM# zrvv~{UFYHE2LLRGFd*RMuIVbHRqk9_-T_SacyBrUe}+sZ6NyCK-QCU2%{4VOrKP2* zsj2?{{>zsyr&6i%#KSKpcgme|qMxrn(6-+Y$v$_pp=*4c^v(+a^@eqBE&(YYf0V@} z4S1+z-;WD13i`zxkraCye?Clg+~}v-s;)SkVU^DQ%&x5(x%xw#p>rrf|5{9X zlTG1(y~>*24t-TH~hs!Mi+MfGjjIo_2L~*R_pm z{soMwS>bY}ZfD88%xHC4bz1~iRU+;WEyV1EC8FdThKa1cr#E?_g%_d%g^j}of!R7_ zT>FJ{n=TdWReg7#y`ad(4SWgh+#h>gH1L)85M=W|jk+tdGEC6f3;cnusBH_kGxQGZ z%aD#}tST2gnOLF~HnSM7Gw%T%a`xkgk9s+I|B6x*&b&cZJI_-!WrK4r!M@4vQcy{RqHsPER(RKo?x=Vg#!|jGzipRGtwHZ~ z*7L7J(^A&DxhNIxd;+eL-3|KHFL2EDj_4>%+z9Wk`2HD;wf}7pDVfuPHLn!pD$V9; zFr(`V(m4W#AqdYTfYYtCgsk#Z6UefGDK$o4f)}j+;|02(nButmNJ;AN z=uj-tI_PYs$h2=q5L{V+8SLOn=reT|&>$0yN>jgAg9x;t8xut9I3KW|ENoKY@bFG? 
zKGii*hcPBd zCkujXGXHMc!{I3hS)AvmI>W7Za(LERVq;;g0W`%#cM0EdTciXUTYvroVI&StJ_WSg z(3r8u=w$itH@Bq#a@hB$td>+1IuP%*Oc29{ zhkJ83O3X_u4lme(w=^Zttl+JVH~71h;MM}H!B;KC^LNLrKg{|F`Y{5b75?JGFFFK7 zbZ#vL34KG7ciX1ku=`i=<|qc?$wG>Y)N;2C1!=!dpz$bn+hI#NVC#o`w1!#&5RNu< znn)$lA>V6np4Skfy?4W3tCf263$XV&%Js-k;iXWr;M`_>u9k2#5K+yKY0KM9a|pEJr4X{7-Utw!{`ih@`)K-)cdZ($qAV{8!61 zGfI#c^%|2$^+88h(o2vBr-4CM8n2~)#3d{rscWvGe57+V)Zse`=8|`}2$ju5(znv6 z1T%Q)kBUR?MIfAKaZ(GqKed!L>=-_@xy)@VV)O9^=0Ty28WcTI-gVy!zO<_DZXj8( zZaiyG$J%58FEJY&wr-K9&e6V{j<Bb5jAP7SR-RO`Sl5$4Gle8aFqc_< z+SNq$BUw32ex)~-PoeELNX!I#DPhOXnJzp{RI&l zKRBDW%-C-`3x2ise|~z6ATUBlFD#t8U4w`7y)e{gQsawkq$u==MsBgId$+a2vmt44bK-;(k=~@ekU(B=#97&A-7#7m2e+1hw#W44Po$7n-#Z;(jMW)x3PPD32)C@xb41UvO z;fD%>jvHHtLWdTO@5y$NmVIq+vRbg} zdS|SI*RyEbP!joVl~re+{Ua(=IB2%LD1GVMazdq1D8+VY9bl5VvuCQk%8@z`3Od0& z(c(npn!uh<1E2o66k;&3!ADs`+lba)6m*<1TxteQ9cx3AI3ztk2X@lA6iLdIZ4_Mj zIrnr{#c}5$Z(vJiY3yCcdQU-p5O1cTHe#MiMZFLe<_3-Jftj84M6MD1@FGVU_LcMB zwYSG^sKi6(yudQDKqGfEDL!=X66_SA@`$Siq3)|MFeS6Ja<`J=+sc3+y{}hn7L0VO zeONPWxjSt!#!)YwT!INa>ki7XYTe*^aA83BwGg=4^%XlqIb)4gE8yWS01>$ixW^k4 zI7DxvVMhtY^EY^}9fmKWK$mP?PeuP+5FhZHkM1RMMbrc$k-Hc^pA1)aVOz4nQabm* z63kO4?L7@^3+|Fl2NJW7>mejEG@uAa=QO-BQsC4#-g_(_bLdSgA|f6l#y^Pd6@-VwH|48A`(Vhd0p-J zu|>;y%5&RfFFT>Rvk~kzqaeiOCeHGp7seuUekG(#;vtb;R`|P_jdtvwihbX_!^YNH zGo9_s;b(V7D(;g@sdb{Kw+Yca2KsufwQxl^uk8zbx22ScP8&gc?Xp@lu1X>|A!)&~ zqlIByWM+qX^8KZ-SP^OMBwXcyLrioM1G9P!`(Pr;92PxRHdDT@%W;E?HKF6i=#xN0 zFZ!yCb$_JVxl1|V!E0<~hjn@E5iE~N`=lH1%V>!+X7UG!obM_GjR@%OOtD*6JJb(^V5Cw-A-c% zdMOGUjJVxr-CQDxCewIbQYgGM9YuBecxz z?sw$9uPEc#wS~deRKK+gLSz@4l}5Jm397(sxD(!vFAG9%a+8(wH52zhHCa6tv`vBs zcsIq1pd+(KlePwf_%ZqVSG%U82z84!Zj-& zNXBI9&{)&n{n(Bgb=-{qBTdi>#$n8c9|xm)eNIrHlS!2Km)l5~DQ7{|_9)?d?505D zQMOZSFx)(jzw3kfy)`f#-UR5#V`WqamDa%Js^DDw+USDIe3b(>*lv7n1Lxsx8PzZLvU_x z?j?})-56^Rkm}-&eRAHzza?%_O$n6cxP~+tB;oelW#6NXj4N7n`46@Xzi3>&^A!Km zw@__Mbm<^tu0f49{~YN)D9iJ~q=J@L>o?BeeE8!7#}_9KNsDobGOG^zUpRk%d-rc! 
zmG13;B%Yb&qJwAP*Ll2TQ@$|S2WlHX51;Byw)2<`_UB${E02JV@}eyGqio9k=i9<= zULT$8;5 zyx#o_`ucypgsR5R;R1S>D^|`gDyn{d3YVTe`YAS0@aJDvdO@`ryh6;qH(_yK$NJBN z5xWnKVbQx5Nu3;A-i()O`ldIXtOzn2PCBm`(UM&eYgYLwyeY(5HTG3@1>36f{LHJc z(ep=!LXIlLYWJvJy|Byl@oMrj?Q_Px&_en%SRA@u(nf(&4zazLZ0+-C}mYLl!v-9P-S^$1F zv!CVpU}o>k?Ar?P#{$67yJmLJ%;ap;FF*n2X#o~}?_k_s066}@%pMk|cP(CCA~Mg- z?8P1+djQ-vvu9?8WW4<{v$us+&W?zL(Eg$7{y`f`qQPABZH;ASy$P0(ekV z@;d|pf`lM@RX`jKfRG{`8R4yE6!ddCEWOq&0&KQ7|HyeQQ-ioO+Snrig!QXSlgP&J zfhnDd0c0rZ2-?SxkK}|*FZcGt2!LneOR2D{G%sv8jyxd-CsmyxkK~|@-IZ+*K+XQm zGGT($;ZQPJTP9*=h^q`5Czon402u87a@uPn$2DhB`;z~k_moyjhm z9Q>S{cUk~Gxx>j!pS6>uJ!Kg6ON>DVifV>^#!}$$3EOPs7J%Q*t68ftV*i4xo+N@F zZg7-+z0&n*B>>Mp3y%O~rI`Xim{h`}5^8B5*^o6bHnyadW#3DPmgeB>y=oKJ?2k5Z zmi_c5_Gy$;5loIOlX)U}zg~+*j{_AJN~76A7GZzyxmg)Zt#zo7`Bs85&Svb{jQwLA z*P3K6wKJhsLitu3d8FAM`zsvB4^Pl$12dqP=e>-m%z&t^#r`VCwU#GnBcBlfLdNMx zU0^E;e)83h*X+|3Bkj$JFnXi}486D}P;MqGaQHd)Zm5nf1kyg}Op-oQG9i;t)Gn6U zNw0LL7#$+NExBQC$kTVUT%bW%^E^K5b?R%C=CN*ECepw9<18=lzE;u7enaye3#dg% zdpCK3gOoq_HaD)HV-^98^`|1f3*TJ^5h7Qrtv?8b?xCUpT6dS|ABJ=p74)^4j&a^! 
z*N5+3qX2qx;(PtelFdZDb93tMH48u)h<%{!ByeO2{|5)Tn@oj~MW`vTjN?e9zU^Q< zjaw?BdjKfKve|g*e_FCq!f^=UNnc&=1t2G35!QJ89~2P~^ft(jJJ_`2THsg!-cjU% zH2mMJ0)?~wZ&qZoqzJAA5Mh1!m@@#&DNRmz>Lc(66XWqQT1`3Z00000NkvXXu0mjf De`NbI literal 0 HcmV?d00001 diff --git a/img/home-icon.svg b/img/home-icon.svg new file mode 100644 index 0000000..b70218f --- /dev/null +++ b/img/home-icon.svg @@ -0,0 +1,20 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/img/icon-arrow-right-blue.svg b/img/icon-arrow-right-blue.svg new file mode 100644 index 0000000..bef3c0c --- /dev/null +++ b/img/icon-arrow-right-blue.svg @@ -0,0 +1,9 @@ + + + + + + diff --git a/img/icon-discord.svg b/img/icon-discord.svg new file mode 100644 index 0000000..6915da4 --- /dev/null +++ b/img/icon-discord.svg @@ -0,0 +1,17 @@ + + + + + + + + + + diff --git a/img/icon-facebook.svg b/img/icon-facebook.svg new file mode 100644 index 0000000..c77af97 --- /dev/null +++ b/img/icon-facebook.svg @@ -0,0 +1,13 @@ + + + + + + + + diff --git a/img/icon-github.svg b/img/icon-github.svg new file mode 100644 index 0000000..556e479 --- /dev/null +++ b/img/icon-github.svg @@ -0,0 +1,16 @@ + + + + + + + + diff --git a/img/icon-linkedin.svg b/img/icon-linkedin.svg new file mode 100644 index 0000000..bcc3752 --- /dev/null +++ b/img/icon-linkedin.svg @@ -0,0 +1,19 @@ + + + + + + + + diff --git a/img/icon-twitter.svg b/img/icon-twitter.svg new file mode 100644 index 0000000..918d695 --- /dev/null +++ b/img/icon-twitter.svg @@ -0,0 +1,16 @@ + + + + + + + + diff --git a/img/p-alphabet-round-icon.png b/img/p-alphabet-round-icon.png new file mode 100644 index 0000000000000000000000000000000000000000..b841eefb7fd9e2419622c662ae6f0b4e6142c0a4 GIT binary patch literal 3753 zcmaJ^dss|s`+nA1GgC92rjt%I)0TEBH9Dw>)+8!BHFP8$P)XR;7A0v`2P7eOd$S`m z)OJWGhY)HOa;64RDvTUbiX75$W_^?WeZSxJ{qwEsdanC^?q|L4`_J>P`&l~#eqI_Z zV-^5t_;`CR20)b*1yxK5*N>UsR)U+rH*lU;qOX$vkB6ctp-|Y+(17E3VPRoJL(bwMGa-_Ce`ejhpZmf6%l|{p)9|n@*dZwQ4iOu!hHpUrE zIzUNF);*S-KG*%u 
zNlyQ^)up^4szy^V``&iZ4D!0aiFfsPtpGD)9$o93BG$@V<9C@~i@~vLdi`5d@)c=T z0;hZAt`doe71U%Ob#C~y*5VAdcuYO3J?LXEWrCE+2j3+GC_-q7w8pn8nN6qtXB(AS z7^kUixgK4H<7>~3yk;6H%29P;aAuZ&x~p~ltf3>{KAnQ+B3JTb!V6mY7~XqwC^CaQ z&P=uuM%ratER1^dIYpD6o*jAZRm&~WPr|Cb-Axwc7k>-u=+=M!L~dX{0di- zZI_ERsCB~Utnm~h%*ZKR=V`yqR#lPp(Q=6^uel6jm@QMseX;T4Cnyeb07EpId>X$) zdHq`98TL#Wt~C26-i-GZ=9{<^r0uK~$b$-Q3sI_HO@#7oxWR0-T>W(4P<)HEHzuY_ zAr6VgOZhouq(!^GUCe#f0}He0c$}VW1X3` z>?@MGj-fK_$J0B5R$`+tv=+5)C3!eeEBs_%^S(sSCkY$CSR%NadQ7d%DqASt2K+=&r7ScZwb-7wF`UQzvzwaLHCi*cehDOGowToe>$|rKI;f?D1ZwXizohULg97w=c zF^E=E_BWx{J|l>az^P)WMXI0qAndnQ*{f8@cVZgJrGgKPLBUemnqdwtMwMtbmX=ZG zj&q@OH+}xVg{(KK#A4It6B0e{FW~oRJ`W|=GQ+j=Iwj0TU1C?GsY|ZJ*f9W-m*UeD zE2@1Fb{jggFXehHix8;gwpG#Z=tJ=YUYx|xDg=_zS7LcQxfYSS>NjW1PCpOtU9MWn zW2@7Vf+d(rcT0Ac8gbva)mvh08wt_7bJDu^XUD1&_n(M^hXu!$i;=)tYH~q5b-+EA7wZxY$79_h#b`m1JS-rgfRcACF(Vptj-t{*^37pdxS4 zp!K5mxAT*;20Y%n#YyFX-?G5h%fv)_f^qd8$}0|Ei&u=El+8m@r>kX$!BA=O4y|;x z?8#vM0*k4nc|O~-)lQ9kF-6rdMxuYn8SMF_xCb?0M17xdV#8ERAXn9z^ZDP`yn+JH zx_ET}W;QVa_F>$wCa;1W89?OlNmZza{G-zW%+*n2@YW>mCp4HmGz8FLAWkrZ3U z)vFNSG;m9mOTa_OpC4p+HT!hiF+t=&3@7AuAcPgz-Fmi1@+T9B51kypgXV20{uFvO z+fPvuk$M>m0@GY|qnKo88lwCLl`1W|w1uxmn1arS6W_M?BIIWJdkwAZP^2;tBL|m-MCr^max)A<{Q>&Fg9PG$OhrQ#GZGi|y#>P`Nj)dZXWal8BIHHHx8&6? 
zN8=zQ527}+hZwjT9P7fX;x!`mf59#;UNv+kZU$lPT5p>%wU0*au6wOASrws9Y#fVL zH<5AZxAx4RR*2MDDkhoSYPvZcDu0iNu9_sM$Y`>27MW?GwEn zn0Hyk6-yTc%hhJUvC(;8hUsqwM<#q0imwOd zshnMrAVrBiZFW##p9KZ2v@#<;c&|mD5{KRtOl94P76g89(zl)2*P zEZ}*CPjSZ}Dw8R8{D0WN%gTIE$V`2ep?2+Vga6qE}4UIX7Y|#x4 zus4OMg4+qj@sXV!YAB)NG^VQTt|_PF+OW&T5KIGxY8A$d$fUuGyKh3iGjNBUk_zMK zf$~3j3(lcs89sV{FGZXr-0Xc7M43p}>trzga1XP@BzcW|3053}KeY2XDthu$SX*q z9UeJwWn{gRcX*NobZ9mQ9|#lLs;)UztO2VFRx@(+ZLXplnLx7Img42&T6zQPQW&0L zZB=0rBtM;XegfM%?pNfWap1W#z6~(KP2jiM{s-fcZfKM$4_x&2Jr14rldZ}QOPc_p z&T{=4HH2hWn}Q3nQlxnIO}`yKfXsBgvYL4ltK&ghp$W0lN3eqNT!TA4 zw}}-rk4l9756!uDYV`>#rN+kPX2i%`7}yRydrTTA{two4M{qSSh>kRKP`4*Ln3~Zs(*w;!Hqpy5Gva=&re4=c1(2>X&?^4KJkT`sQ!#A)e3BX#;zU-n~V=vXp6_3xzhc!MyFEic8hN>6UTm)Og6T% zouu9{r*YK{jU!cG=iGiTOf|BM(xJ_#%6-Ir%#q9&2_1~oq2hMX5-yFjjAa=wi~KEs zcb6y4FPE!LXX`QCpA5zjJ+qq7nUrqIHM6r{kETU^*nHmk{&8)`$-A4*Yxlpes?%;* zUQ@$U-+NNz^RuddBWslWUG&ngzjTwp;p^FAmA;E{b{E|Sn-c1VUb0SAhKEF@NiI~puRLMbXZnq8defPaXgcn Vp4fIHD&v2fKJ)xMD?OsJ{snz&D>DE9 literal 0 HcmV?d00001 diff --git a/img/prototype.svg b/img/prototype.svg new file mode 100644 index 0000000..cf39ebd --- /dev/null +++ b/img/prototype.svg @@ -0,0 +1,148 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/img/reddit-logo.png b/img/reddit-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..e628090ccc3f66cc56b08a33eb1f5e2253d175e7 GIT binary patch literal 4638 zcmV+(65;KMP)Px`*hxe|RCr$Poei*6RTam7=e{tdfeNNH(0rDXgMl-C6h)aIK<4)_nTVrBr8$E- zTG`;oU^#v;YT1y?_)%gyB8^2UXut!d45JbzC7r+~o|-MRjKHHCul&XQ>PyMGF3~vBC@z{&}9X8{!F#yGGUT9@Nv>X5iF&xGM!vp5Sd-OTnK*EUX&(D}Mozab!FCY+_ z3VP>-BARsga0By_oTB-pS0}psTY%|4Dqvdl4Kt$Z1(6Yu|qf{UKy)gb|U;(>X zo~JJ%Hvn{Iy#{K0HsSGLp3)!}0CZ-(2Jlza&*D6mjm`n!Phl1%X5n*8N91b) zK^ciT*apr35bVmm!xea(4IE;;8KX(7Azo}7KKJ2quPX!52>=3-@9_;hJ`cQah%?Fc 
z@SK9cIsiKQ-U%K;yP%6(z2$7O*@9I7beM0GHL<$HE-0~-5;xiefi(bhnD3DJs%i%y zumXS%^K)RnBM>wUfDZF>WWECsGz5SS^P9l@C|8L>+66ocGme#W4>y*ArNE(z%;JF=i6luU(+ej=>VBQU<`mj?|1P%d?pP! z_wLar!RRq?U^l$H9(Hbn9a~_x$`8o^wP>AvJ7LR4*z!VSbtZR1oJBLe9)4reOCtab zX}y0?H+*d#9DWQ89|8aUH|*IRGV0aM;m_yS=z~HSfAUNicRcJrJgoEW|AByD^LqHj z_hGA5r=Dap-Hyi(O^iY_08HTrc+47-djFg&;M{M+2V~zW_6YOec^h_Z(*uPt|JQ&wjG7rqW7N9%RD9w?6bbSO!Z zn6GM=?;raSJiZ{5D)ydjcwEbyEYXQ9(gHx)>R$j%9ddeK5&pWxFnKz>tDHi(y%*l< z3+(|B-#U*1#qg1pdg^^AjK6gzykMlZb$W6Sdzt60L6TbQK6(Hc>Q=w#|L;5qAOCDf zEMfj5H-`qBF#q6jaKH#S`a~E#)(jMB9C-A0Sn@;rQb03~AQb>Z+Ui%#Icy?abuS$G z$?z}2_&YC#zy2ZoT|n@z{b1CXia}=!6i+RJhrSE1N=&HLM{8A37XU-r>W|VoZUFv% zDcnB?UU{iDM{0rM>?`1qkLhz#B+akxg8yvS|3ikfLWO$pqRS^ zj+vs%BKXN1`1OzBb;&fh{-jEQngD1E!%xvWZUEjq1J*8qclMfdM3#TkuM8bZX^+?% zcg=mfa3zN>Q4T6;m;g|E_l^5O3e$}fw zQXm-s?K1m?K<8fv=UfKwKPV)Ztyw7`m~cWU3C2$jfgmDZRv7CZhd)0R&ipKD#vGJ2 zr9dqJEXHU0kahY*2^iP^HjOf2`UksV)PdnLQXECH7qH_M*z=}28yoqeh7=eCfO3@2 zoxstJwAT{f#`V8+x)B4g*n>`#0)qiCh3hDB89rs1SnN?X)VsI?NJMla24F*-$>HM! 
zDNqG~wle;N-f;&|+yj^cPjYR-XAvIX^?HI-^x&xmKsy9x} z0B9fMPw3{-05sxDj)h}tU;qGt`0wXIe9Bv5Id-ZUO|_Q+SfOxC4O9ToZsNb)48RHm zs?Pci_r?f|-Akn1`lY9Iu_DNLlqy+Ao4!gUV{ zP=7lbfEBVoONq<96>JCrFckH_7X2R&)8DsPro!|dfv0$WQYn3FRjy;3NpR}MmC;=YSPcZ@GBdFeqP(Z zN^C1#Eg3k<;1a^EUkOh=T%8~x-<`M}_$4;k0^Pgjgp z6)d7`i{C$PJ5i)ouTK{V;C2NN)z|gJwpiiodtl9C7`9&+Pgf*SufzCwndq2AEgW%N zC1Y5kgrchy`BvE$A{ixT@VVt~?Fx_Co<9wDD}b~t**L+}%<;NfYsS|DM^r6*_{j96 zQB{|uj<0$!S%PE%WAZomhy-{7IL%(2k(#c$4^BG2bwsxlB9to?AndolUec)Qhe(ho z?}I1rb)yn|7P$k!v{`V$_3)vgnN98zD^+#7%ODm*aFM8h-&&6W5X<(m1#rw%GsNdc z)fQ{WO$Kop08?3KeG@3Y*WP0EE?jqANrAEoun&NG$E_5Ox-HV7>raP2K54tN&x#FT z9{{Gutu!*MEGKz1(q6N^l_b(t57}N8Zfs2e$Z1`&uKxSR&^nd_oYYAkruq3M3{|Iu zJ^ysrZs5f$pB$pnzf9^gDQh!hsnV_^0Gx3toHr+2%iDg031Sqzydm73i{{n|2*hhB z>DkAfnx36&(l^zv#0#jc~mn*`qk^5;*N*Hz076vbNOor_h6k!I2gk6wPu{hDNpA z{Y{Xx>Nj3XFKu#AL%rH`K__(x2(DQK6F-;UKa&!!Qr4V0f^0EB5z3SZwwPg>Eo6J# zGJexO=z_~B0Bnmyor9a=1Hb7B_(X$VZd2WR@&$0=4KVfyT{hG&C+g>?bHjK=+h++l z1%MKXy2quYkt@q_YL_p7Q!Z@G7&O(rHtioB+arx`_1^p3EH;3g0D2)Fn#W-pwkQVK zl#3d-kh1_F9{O$A^`N-v`rDQl)HU-%TFenYz@_-i!Nb9ttxRzn>cG?;nw0yO1ppE* z{u?^j5vMFNA9c>Cc z8(ZL}c1ISFh2NasS^yAcEW9?1x2t!Fg9M^{`Fn8WBs;s2y1!B@wfce0S|ApLB^z7V7b{PVe)ceu-2LvTq^H30J37qBS> zWY11~cEi8n7h=qTyPl zQs|MJe2zV)#0eP>41Ldg?Sz2I?%%^V!9V|=owa&j4p{%Hc+zq4?$v;_ZexpBymB{A zhYhPL!Ee@p^S8v$|n?I7_#NCn6r zd{F~Fm&o}sPK`k|phiQu(~f0PU{F`Ms~bO)2&%G-)YUF>B3L~4K=;6lG={0`X`lJI zUX*iMed+H9iT^|Z1R%&$3P^+Fj)O0Kr?F&?G$1Gfsbr2Mg6)32cHpHg#!I^*_Ltoa z>sRE({KVN`8vuD3gHa~fiXBYaw#|Ctr;C%x@`?3}!#i>I*91Ty1@iC-#Jg$3FjRF$ zw{7MBasX3sB3b!lGASqjA33^moUHmbxFvi{Xg7%Juyrz7bC_Rq_SXkMGo*kr48@8+ z?;5j9nA3EiI`;LgH^O96dE}I`VF|~!*kX9JPt9Sz^q}VKuM2>tNdakCC3(7B)29K( z)_disiZ3B;)nC7!y0bqW0C}$nl2n4tHGMur4_j$2cOWP1SBr!}DkfTkLR^+phP`KseuOx(U{6jJY%dstQu z;#ZL4X@u#;nC;O~NW9-1x>%Mr`?YL<0aeg!gD{Oz(c@a8Zr|>Sp_+(Z86$PYe^+i` zc^Cb^Vz%S~D7Ub3728hNP8*}ndyjAOHVeHqXq=)Eiw8)nKiDULTY{_QU1bxeb;9Jm zQWtVExkv~3sh+q?SosO$ef-*(_l{CzJ8AZbY2qZxV z1oQ5#%5Ils7gD20_)JRV(>Y&0pgW+Kwt#~y?D%LACK`GTeB|p 
zoGt_FRUUxAV=6F%ekOO8i3XquE&aRM#|qt@J%ic@iaJpNRW?C*{7g5c;8co^uRw=O z4vf88%{m9bfL$QIf{t-mpZy`@9|oSo<7J!1Uef^(EE78_p*8?=ob}O8)5Y>m zmj}74fTWI<531~DD655V#&5&p={$|RG$0fL0E5&*7YDMBX?Vd=_7hs#e-IV*CAQXr=qU zr2tUF#XLM + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/img/robot-hero.svg b/img/robot-hero.svg new file mode 100644 index 0000000..75325dd --- /dev/null +++ b/img/robot-hero.svg @@ -0,0 +1,862 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/img/streamline.svg b/img/streamline.svg new file mode 100644 index 0000000..cba76db --- /dev/null +++ b/img/streamline.svg @@ -0,0 +1,75 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/img/twitter-logo.svg b/img/twitter-logo.svg new file mode 100644 index 0000000..c0364cc --- /dev/null +++ b/img/twitter-logo.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/img/undraw_docusaurus_mountain.svg b/img/undraw_docusaurus_mountain.svg new file mode 100644 index 0000000..af961c4 --- /dev/null +++ b/img/undraw_docusaurus_mountain.svg @@ -0,0 +1,171 @@ + + Easy to Use + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/img/undraw_docusaurus_react.svg b/img/undraw_docusaurus_react.svg new file mode 100644 index 0000000..94b5cf0 --- /dev/null +++ b/img/undraw_docusaurus_react.svg @@ -0,0 +1,170 @@ + + Powered by React + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/img/undraw_docusaurus_tree.svg b/img/undraw_docusaurus_tree.svg new file mode 100644 index 0000000..d9161d3 --- /dev/null +++ b/img/undraw_docusaurus_tree.svg @@ -0,0 +1,40 @@ + + Focus on What Matters + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/img/write.svg b/img/write.svg new file mode 100644 index 0000000..31e8f3d --- /dev/null +++ b/img/write.svg @@ -0,0 +1,113 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/index.html b/index.html new file mode 100644 index 0000000..2913609 --- /dev/null +++ b/index.html @@ -0,0 +1,32 @@ + + + + + +Effortless Kafka integration for web services | FastKafka + + + + + + + + + + +

+

Open-source framework for building asynchronous web

services that interact with Kafka

Open-source framework for building asynchronous web services that interact with Kafka

Swim with the stream…ing services

WRITE

producers & consumers for Kafka topics in a simplified way

PROTOTYPE

quickly & develop high-performance Kafka-based services

STREAMLINE

your workflow & accelerate your progress

Check out our code-generation feature!

Let us know what you need solved and we’ll generate the FastKafka code for you!

You get what you expect

Function decorators with type hints specifying Pydantic classes for JSON encoding/decoding, automatic message routing and documentation generation.

Built on top of Pydantic, AIOKafka and AsyncAPI, FastKafka simplifies the process of writing producers and consumers for Kafka topics, handling all the parsing, networking, task scheduling and data generation automatically.

FAQs

For anything not covered here, join our Discord

+ + + + \ No newline at end of file diff --git a/opensearch.xml b/opensearch.xml new file mode 100644 index 0000000..cb44de1 --- /dev/null +++ b/opensearch.xml @@ -0,0 +1,11 @@ + + + FastKafka + Search FastKafka + UTF-8 + https://fastkafka.airt.ai/img/AIRT_icon_blue.svg + + + https://fastkafka.airt.ai/ + \ No newline at end of file diff --git a/search/index.html b/search/index.html new file mode 100644 index 0000000..9e07960 --- /dev/null +++ b/search/index.html @@ -0,0 +1,32 @@ + + + + + +Search the documentation | FastKafka + + + + + + + + + + +
+

Search the documentation

+ + + + \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 0000000..56d579b --- /dev/null +++ b/sitemap.xml @@ -0,0 +1 @@ +https://fastkafka.airt.ai/demo/weekly0.5https://fastkafka.airt.ai/search/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/api/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/api/fastkafka/encoder/avsc_to_pydantic/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/api/fastkafka/KafkaEvent/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/api/fastkafka/testing/ApacheKafkaBroker/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/api/fastkafka/testing/LocalRedpandaBroker/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/api/fastkafka/testing/Tester/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/CHANGELOG/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/cli/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/cli/run_fastkafka_server_process/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_00_FastKafka_Demo/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_01_Intro/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_02_First_Steps/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_03_Authentication/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_04_Github_Actions_Workflow/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_05_Lifespan_Handler/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_06_Benchmarking_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_11_Consumes_Basics/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_21_Produces_Basics/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_22_Partition_Keys/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.5.0/guides/Guide_31_Using_redpa
nda_to_test_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/encoder/avro_decoder/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/encoder/avro_encoder/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/encoder/AvroBase/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/encoder/avsc_to_pydantic/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/encoder/json_decoder/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/encoder/json_encoder/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/EventMetadata/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/executors/DynamicTaskExecutor/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/executors/SequentialExecutor/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/KafkaEvent/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/testing/ApacheKafkaBroker/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/testing/LocalRedpandaBroker/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/api/fastkafka/testing/Tester/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/CHANGELOG/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/cli/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/cli/run_fastkafka_server_process/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/CONTRIBUTING/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_00_FastKafka_Demo/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_01_Intro/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_02_First_Steps/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_03_Authentication/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_04_Github_Actions_Workflow/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_05_Lifespan_Handler/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_06_Benchmarking_FastKafka/weekly0.5http
s://fastkafka.airt.ai/docs/0.6.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_11_Consumes_Basics/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_21_Produces_Basics/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_22_Partition_Keys/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_23_Batch_Producing/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.6.0/LICENSE/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/encoder/avro_decoder/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/encoder/avro_encoder/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/encoder/AvroBase/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/encoder/avsc_to_pydantic/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/encoder/json_decoder/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/encoder/json_encoder/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/EventMetadata/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/executors/DynamicTaskExecutor/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/executors/SequentialExecutor/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/KafkaEvent/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/testing/ApacheKafkaBroker/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/testing/LocalRedpandaBroker/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/api/fastkafka/testing/Tester/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/CHANGELOG/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/cli/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/cli/run_fastkafka_serve
r_process/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/CONTRIBUTING/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_00_FastKafka_Demo/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_01_Intro/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_02_First_Steps/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_03_Authentication/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_04_Github_Actions_Workflow/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_05_Lifespan_Handler/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_06_Benchmarking_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_11_Consumes_Basics/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_12_Batch_Consuming/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_21_Produces_Basics/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_22_Partition_Keys/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_23_Batch_Producing/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_24_Using_Multiple_Kafka_Clusters/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_30_Using_docker_to_deploy_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_31_Using_redpanda_to_test_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/weekly0.5https://fastkafka.airt.ai/docs/0.7.0/LICENSE/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/encoder/avro_decoder/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/encoder/avro_encoder/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/encoder/AvroBase/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/encoder/avsc_to_pydantic/weekly0.5https://
fastkafka.airt.ai/docs/0.7.1/api/fastkafka/encoder/json_decoder/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/encoder/json_encoder/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/EventMetadata/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/executors/DynamicTaskExecutor/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/executors/SequentialExecutor/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/KafkaEvent/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/testing/ApacheKafkaBroker/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/testing/LocalRedpandaBroker/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/api/fastkafka/testing/Tester/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/CHANGELOG/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/cli/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/cli/run_fastkafka_server_process/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/CONTRIBUTING/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_00_FastKafka_Demo/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_01_Intro/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_02_First_Steps/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_03_Authentication/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_04_Github_Actions_Workflow/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_05_Lifespan_Handler/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_06_Benchmarking_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_11_Consumes_Basics/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_12_Batch_Consuming/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_21_Produces_Basics/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_22_Partition_Keys/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_23_B
atch_Producing/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_24_Using_Multiple_Kafka_Clusters/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_30_Using_docker_to_deploy_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_31_Using_redpanda_to_test_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/weekly0.5https://fastkafka.airt.ai/docs/0.7.1/LICENSE/weekly0.5https://fastkafka.airt.ai/docs/next/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/encoder/avro_decoder/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/encoder/avro_encoder/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/encoder/AvroBase/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/encoder/avsc_to_pydantic/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/encoder/json_decoder/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/encoder/json_encoder/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/EventMetadata/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/executors/DynamicTaskExecutor/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/executors/SequentialExecutor/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/KafkaEvent/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/testing/ApacheKafkaBroker/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/testing/LocalRedpandaBroker/weekly0.5https://fastkafka.airt.ai/docs/next/api/fastkafka/testing/Tester/weekly0.5https://fastkafka.airt.ai/docs/next/CHANGELOG/weekly0.5https://fastkafka.airt.ai/docs/next/cli/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/next/cli/run_fastkafka_server_process/weekly0.5https://fastkafka.airt.ai/docs/next/CONTRIBUTING/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_00_FastKafka_Demo/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_01_Intro/w
eekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_02_First_Steps/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_03_Authentication/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_04_Github_Actions_Workflow/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_05_Lifespan_Handler/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_06_Benchmarking_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_11_Consumes_Basics/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_12_Batch_Consuming/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_21_Produces_Basics/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_22_Partition_Keys/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_23_Batch_Producing/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_24_Using_Multiple_Kafka_Clusters/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_30_Using_docker_to_deploy_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_31_Using_redpanda_to_test_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_32_Using_fastapi_to_run_fastkafka_application/weekly0.5https://fastkafka.airt.ai/docs/next/guides/Guide_33_Using_Tester_class_to_test_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/next/LICENSE/weekly0.5https://fastkafka.airt.ai/docs/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/encoder/avro_decoder/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/encoder/avro_encoder/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/encoder/AvroBase/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/encoder/avsc_to_pydantic/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/encoder/json_decoder/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/encoder/json_encoder/weekly0.5https://fastkafka.airt.ai/d
ocs/api/fastkafka/EventMetadata/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/executors/DynamicTaskExecutor/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/executors/SequentialExecutor/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/KafkaEvent/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/testing/ApacheKafkaBroker/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/testing/LocalRedpandaBroker/weekly0.5https://fastkafka.airt.ai/docs/api/fastkafka/testing/Tester/weekly0.5https://fastkafka.airt.ai/docs/CHANGELOG/weekly0.5https://fastkafka.airt.ai/docs/cli/fastkafka/weekly0.5https://fastkafka.airt.ai/docs/cli/run_fastkafka_server_process/weekly0.5https://fastkafka.airt.ai/docs/CONTRIBUTING/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_00_FastKafka_Demo/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_01_Intro/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_02_First_Steps/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_03_Authentication/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_04_Github_Actions_Workflow/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_05_Lifespan_Handler/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_06_Benchmarking_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_07_Encoding_and_Decoding_Messages_with_FastKafka/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_11_Consumes_Basics/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_12_Batch_Consuming/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_21_Produces_Basics/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_22_Partition_Keys/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_23_Batch_Producing/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_24_Using_Multiple_Kafka_Clusters/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_30_Using_docker_to_deploy_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/guides/Guide_31_Using_redpanda_to_test_fastkafka/weekly0.5https://fastkafka.airt.ai/docs/guides
/Guide_32_Using_fastapi_to_run_fastkafka_application/weekly0.5https://fastkafka.airt.ai/docs/LICENSE/weekly0.5https://fastkafka.airt.ai/weekly0.5 \ No newline at end of file