diff --git a/integration/benchmarks/test_blank_app.py b/integration/benchmarks/test_base_app.py
similarity index 72%
rename from integration/benchmarks/test_blank_app.py
rename to integration/benchmarks/test_base_app.py
index 7e7d552672..2eb8aad753 100644
--- a/integration/benchmarks/test_blank_app.py
+++ b/integration/benchmarks/test_base_app.py
@@ -9,7 +9,7 @@ from reflex.utils import build, path_ops
 
 
-def BlankTemplate():
+def BaseApp():
     """Test that background tasks work as expected."""
     from rxconfig import config
 
@@ -45,7 +45,7 @@ def index() -> rx.Component:
     app.add_page(index)
 
 
-def BlankTemplate2():
+def BaseApp2():
     """Test that background tasks work as expected."""
     from rxconfig import config
 
@@ -148,10 +148,10 @@ def selection() -> rx.Component:
 
 
 @pytest.fixture(scope="session")
-def blank_template(
+def base_app(
     tmp_path_factory,
 ) -> Generator[AppHarness, None, None]:
-    """Start Blank Template app at tmp_path via AppHarness.
+    """Start Base app at tmp_path via AppHarness.
 
     Args:
         tmp_path_factory: pytest tmp_path_factory fixture
@@ -159,16 +159,16 @@ def blank_template(
     Yields:
         running AppHarness instance
     """
-    root = tmp_path_factory.mktemp(f"blank_template")
+    root = tmp_path_factory.mktemp("base_app")
 
-    yield AppHarness.create(root=root, app_source=BlankTemplate) # type: ignore
+    yield AppHarness.create(root=root, app_source=BaseApp)  # type: ignore
 
 
 @pytest.fixture(scope="session")
-def blank_template_two_pages(
+def base_app_two_pages(
     tmp_path_factory,
 ) -> Generator[AppHarness, None, None]:
-    """Start Blank Template app at tmp_path via AppHarness.
+    """Start Base app at tmp_path via AppHarness.
 
     Args:
         tmp_path_factory: pytest tmp_path_factory fixture
@@ -176,23 +176,23 @@ def blank_template_two_pages(
     Yields:
         running AppHarness instance
     """
-    root = tmp_path_factory.mktemp(f"blank_template_two_pages")
+    root = tmp_path_factory.mktemp("base_app_two_pages")
 
-    yield AppHarness.create(root=root, app_source=BlankTemplate2) # type: ignore
+    yield AppHarness.create(root=root, app_source=BaseApp2)  # type: ignore
 
 
 @pytest.mark.benchmark(
     group="blank template", timer=time.perf_counter, disable_gc=True, warmup=False
 )
-def test_blank_template_compile_time(benchmark, blank_template):
+def test_base_app_compile_time_cold(benchmark, base_app):
     def setup():
-        with chdir(blank_template.app_path):
-            blank_template._initialize_app()
-            build.setup_frontend(blank_template.app_path)
+        with chdir(base_app.app_path):
+            base_app._initialize_app()
+            build.setup_frontend(base_app.app_path)
 
     def benchmark_fn():
-        with chdir(blank_template.app_path):
-            blank_template.app_instance.compile_()
+        with chdir(base_app.app_path):
+            base_app.app_instance.compile_()
 
     benchmark.pedantic(benchmark_fn, setup=setup, rounds=10)
 
@@ -200,15 +200,15 @@ def benchmark_fn():
 @pytest.mark.benchmark(
     group="blank template", timer=time.perf_counter, disable_gc=True, warmup=False
 )
-def test_blank_template_two_pages_compile_time(benchmark, blank_template_two_pages):
+def test_base_app_two_pages_compile_time_cold(benchmark, base_app_two_pages):
     def setup():
-        with chdir(blank_template_two_pages.app_path):
-            blank_template_two_pages._initialize_app()
-            build.setup_frontend(blank_template_two_pages.app_path)
+        with chdir(base_app_two_pages.app_path):
+            base_app_two_pages._initialize_app()
+            build.setup_frontend(base_app_two_pages.app_path)
 
     def benchmark_fn():
-        with chdir(blank_template_two_pages.app_path):
-            blank_template_two_pages.app_instance.compile_()
+        with chdir(base_app_two_pages.app_path):
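+            # Recompile, then clear the frontend caches below so every round starts cold.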
+            base_app_two_pages.app_instance.compile_()
             path_ops.rm(
                 os.path.join(
                     constants.Dirs.WEB, "reflex.install_frontend_packages.cached"
                 )
             )
@@ -217,3 +217,42 @@ def benchmark_fn():
     path_ops.rm(os.path.join(constants.Dirs.WEB, "node_modules"))
 
     benchmark.pedantic(benchmark_fn, setup=setup, rounds=10)
+
+
+@pytest.mark.benchmark(
+    group="blank template", min_rounds=10, timer=time.perf_counter, disable_gc=True, warmup=False
+)
+def test_base_app_compile_time_warm(benchmark, base_app):
+    def benchmark_fn():
+        with chdir(base_app.app_path):
+            base_app.app_instance.compile_()
+
+    # Rounds are governed by min_rounds on the marker; benchmark() forwards extra kwargs to the fn.
+    benchmark(benchmark_fn)
+
+
+@pytest.mark.benchmark(
+    group="blank template", min_rounds=10, timer=time.perf_counter, disable_gc=True, warmup=False
+)
+def test_base_app_two_pages_compile_time_warm(benchmark, base_app_two_pages):
+    def benchmark_fn():
+        with chdir(base_app_two_pages.app_path):
+            base_app_two_pages.app_instance.compile_()
+            path_ops.rm(
+                os.path.join(
+                    constants.Dirs.WEB, "reflex.install_frontend_packages.cached"
+                )
+            )
+            path_ops.rm(os.path.join(constants.Dirs.WEB, "node_modules"))
+
+    benchmark(benchmark_fn)
+
+
+def test_base_app_hot_reload():
+    pass
+
+
+def test_base_app_two_pages_hot_reload():
+    pass
\ No newline at end of file
diff --git a/integration/benchmarks/test_large_app.py b/integration/benchmarks/test_large_app.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/integration/benchmarks/test_medium_size_app.py b/integration/benchmarks/test_medium_size_app.py
new file mode 100644
index 0000000000..549bfed63b
--- /dev/null
+++ b/integration/benchmarks/test_medium_size_app.py
@@ -0,0 +1,166 @@
+import os
+import time
+from typing import Generator
+
+import pytest
+
+from reflex import constants
+from reflex.testing import AppHarness, chdir
+from reflex.utils import build, path_ops
+
+
+def MediumApp():
+    """A medium-sized app: thirty pages alternating between two layouts."""
+    from rxconfig import config
+
+    import reflex as rx
+
+    docs_url = "https://reflex.dev/docs/getting-started/introduction/"
+    filename = f"{config.app_name}/{config.app_name}.py"
+    college = [
+        "Stanford University",
+        "Arizona",
+        "Arizona state",
+        "Baylor",
+        "Boston College",
+        "Boston University",
+    ]
+
+    class State(rx.State):
+        """The app state."""
+
+        position: str
+        college: str
+        age: tuple[int, int] = (18, 50)
+        salary: tuple[int, int] = (0, 25000000)
+
+    comp1 = rx.center(
+        rx.theme_panel(),
+        rx.vstack(
+            rx.heading("Welcome to Reflex!", size="9"),
+            rx.text("Get started by editing ", rx.code(filename)),
+            rx.button(
+                "Check out our docs!",
+                on_click=lambda: rx.redirect(docs_url),
+                size="4",
+            ),
+            align="center",
+            spacing="7",
+            font_size="2em",
+        ),
+        height="100vh",
+    )
+
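+    # comp2 is the heavier layout: selects, range sliders, and badges bound to State.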
(All)", + on_change=State.set_college, + size="3", + ), + ), + rx.vstack( + rx.vstack( + rx.hstack( + rx.badge("Min Age: ", State.age[0]), + rx.divider(orientation="vertical"), + rx.badge("Max Age: ", State.age[1]), + ), + rx.slider( + default_value=[18, 50], + min=18, + max=50, + on_value_commit=State.set_age, + ), + align_items="left", + width="100%", + ), + rx.vstack( + rx.hstack( + rx.badge("Min Sal: ", State.salary[0] // 1000000, "M"), + rx.divider(orientation="vertical"), + rx.badge("Max Sal: ", State.salary[1] // 1000000, "M"), + ), + rx.slider( + default_value=[0, 25000000], + min=0, + max=25000000, + on_value_commit=State.set_salary, + ), + align_items="left", + width="100%", + ), + ), + spacing="4", + ), + width="100%", + ) + + app = rx.App(state=rx.State) + + for i in range(1, 31): + if i % 2 == 1: + app.add_page(comp1, route=f"page{i}") + else: + app.add_page(comp2, route=f"page{i}") + + + +@pytest.fixture(scope="session") +def medium_app( + tmp_path_factory, +) -> Generator[AppHarness, None, None]: + """Start Blank Template app at tmp_path via AppHarness. + + Args: + tmp_path_factory: pytest tmp_path_factory fixture + + Yields: + running AppHarness instance + """ + root = tmp_path_factory.mktemp(f"medium_app") + + yield AppHarness.create(root=root, app_source=MediumApp) # type: ignore + + + +@pytest.mark.benchmark( + group="Medium sized app", min_rounds=10, timer=time.perf_counter, disable_gc=True, warmup=False +) +def test_medium_app_compile_time_cold(benchmark, medium_app): + def setup(): + with chdir(medium_app.app_path): + medium_app._initialize_app() + build.setup_frontend(medium_app.app_path) + + def benchmark_fn(): + with chdir(medium_app.app_path): + medium_app.app_instance.compile_() + + benchmark.pedantic(benchmark_fn, setup=setup, rounds=10) + + + +@pytest.mark.benchmark( + group="Medium sized app", min_rounds=10,timer=time.perf_counter, disable_gc=True, warmup=False +) +def test_medium_app_compile_time_warm(benchmark, medium_app): + + def benchmark_fn(): + with chdir(medium_app.app_path): + medium_app.app_instance.compile_() + + benchmark(benchmark_fn) + + + +def test_medium_app_hot_reload(): + pass