diff --git a/.gitignore b/.gitignore
index b487314..6bf3d65 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,4 +5,10 @@ build
 dist
 .nyunservice
 tests/
-nyuntam/text_generation/
+*text_generation
+/.nyunservices
+/datasets
+/jobs
+/logs
+/models
+/.cache
\ No newline at end of file
diff --git a/cli/cli.py b/cli/cli.py
index 62ba26b..bacccee 100644
--- a/cli/cli.py
+++ b/cli/cli.py
@@ -1,4 +1,7 @@
 import typer
+import os
+import time
+import psutil
 from pathlib import Path
 from version import __version__
 from cli.docs import NYUN_TRADEMARK
@@ -85,6 +88,9 @@ def run(
     You need to provide the path to the YAML or JSON script file you want to run.
     The script will be executed within the initialized workspace.
     """
+    start_time = time.time()
+    process = psutil.Process(os.getpid())
+    before_memory = process.memory_info().rss / (1024 ** 2)
     if not file_paths:
         typer.echo("Please provide the path(s) to the script file.")
         raise typer.Abort()
@@ -152,6 +158,14 @@ def run(
     except Exception as e:
         typer.echo(f"Failed: {str(e)}")
         raise typer.Abort()
+    end_time = time.time()
+
+    after_memory = process.memory_info().rss / (1024 ** 2)
+    memory_used = after_memory - before_memory
+    print(f"Memory Used: {memory_used:.2f} MB")
+
+    execution_time = end_time - start_time
+    print(f"Execution Time: {execution_time:.6f} seconds")
 
 
 @app.command(help="Show the version of the Nyun CLI.")
diff --git a/nyuntam/factory.py b/nyuntam/factory.py
index c9f6511..93f79a8 100644
--- a/nyuntam/factory.py
+++ b/nyuntam/factory.py
@@ -133,7 +133,7 @@ def create_from_json(
 
     @classmethod
     def create_from_yaml(
-        cls, path: Union[str, Path]
+        cls, path: Union[str, Path], flag_dict: bool = True
     ) -> Optional[Union["Factory", None]]:
         """Create a Factory instance from a YAML file."""
 
@@ -145,7 +145,11 @@ def create_from_yaml(
 
         with open(path, "r") as f:
             args = yaml.safe_load(f)
-        return cls.create_from_dict(args)
+        if flag_dict:
+            return cls.create_from_dict(args)
+        else:
+            return args
+
 
     @property
     def algorithm(self) -> Algorithm:
diff --git a/nyuntam/main.py b/nyuntam/main.py
index d44f183..a45afd0 100644
--- a/nyuntam/main.py
+++ b/nyuntam/main.py
@@ -5,6 +5,7 @@
 from nyuntam.factory import Factory
 from nyuntam.utils.logger import set_logger
 from nyuntam.commands import get_args
+from nyuntam.utils.benchmark import create_benchmarks
 
 set_logger()
@@ -18,7 +19,14 @@ def main():
             factory = Factory.create_from_yaml(args.yaml_path)
         else:
             factory = Factory.create_from_json(args.json_path)
-
+
+        # Re-read the raw YAML dict for benchmarking. Only possible when the
+        # job was provided as YAML: args.yaml_path is None for JSON jobs.
+        arguments = (
+            Factory.create_from_yaml(args.yaml_path, flag_dict=False)
+            if args.yaml_path
+            else None
+        )
     except Exception as e:
         logging.exception(f"Failed to create Factory instance: {e}")
         raise
@@ -32,6 +40,8 @@ def main():
     try:
         factory.run()
         logging.info("Job completed successfully.")
+        if arguments is not None:
+            create_benchmarks(arguments)
     except Exception as e:
         logging.exception(f"Failed to run job: {e}")
         raise
diff --git a/nyuntam/utils/benchmark.py b/nyuntam/utils/benchmark.py
new file mode 100644
index 0000000..2c3ef76
--- /dev/null
+++ b/nyuntam/utils/benchmark.py
@@ -0,0 +1,57 @@
+import os
+import logging
+
+
+def create_benchmarks(args: dict):
+    """Log original vs. quantised model sizes and their compression ratio.
+
+    Expects ``args`` to be the raw job-YAML dict with ``OUTPUT_PATH``,
+    ``MODEL_PATH`` and ``MODEL`` (``vendor/name``) keys. Best-effort:
+    every failure is logged, never raised, so benchmarking can not fail
+    an already-completed job.
+    """
+    log_file_path = "/user_data/logs/compression_results.log"
+
+    try:
+        os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
+
+        # Use a dedicated logger with an explicit FileHandler:
+        # logging.basicConfig() is a no-op once the root logger already has
+        # handlers, and set_logger() in main.py runs before this function.
+        logger = logging.getLogger("nyuntam.benchmark")
+        logger.setLevel(logging.INFO)
+        if not logger.handlers:
+            handler = logging.FileHandler(log_file_path, mode="a")
+            handler.setFormatter(
+                logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
+            )
+            logger.addHandler(handler)
+
+        vendor, name = args["MODEL"].split("/")[:2]
+        # NOTE(review): the original model is resolved under OUTPUT_PATH and
+        # the quantised one under MODEL_PATH — confirm against the job layout.
+        original_model_path = os.path.join(
+            args["OUTPUT_PATH"], vendor, name, "model.safetensors"
+        )
+        quantised_model_path = os.path.join(args["MODEL_PATH"], "model.safetensors")
+
+        logger.info(f"Model Path: {original_model_path}")
+        logger.info(f"Quantised Model Path: {quantised_model_path}")
+
+        original_size = os.path.getsize(original_model_path) / (1024**2)  # MB
+        quantized_size = os.path.getsize(quantised_model_path) / (1024**2)  # MB
+
+        if quantized_size > 0:
+            compression_ratio = original_size / quantized_size
+            log_message = (
+                f"Original Model Size: {original_size:.2f} MB\n"
+                f"Quantized Model Size: {quantized_size:.2f} MB\n"
+                f"Compression Ratio: {compression_ratio:.2f}\n"
+            )
+        else:
+            log_message = "Error: Quantized model size is zero"
+
+        logger.info(log_message)
+    except FileNotFoundError as e:
+        logging.error(f"File not found: {e}")
+    except Exception as e:
+        logging.error(f"Unexpected error: {e}")