Skip to content

Commit

Permalink
Merge branch 'main' of https://github.com/moverseai/moai
Browse files Browse the repository at this point in the history
  • Loading branch information
tzole1155 committed Jun 18, 2024
2 parents 8046d38 + df7c23d commit 22a981a
Show file tree
Hide file tree
Showing 6 changed files with 50 additions and 11 deletions.
2 changes: 1 addition & 1 deletion docker/serve/azure/pt220-cu118/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM ghcr.io/moverseai/moai/serve:v1.0.0-pt220-cu118
FROM ghcr.io/moverseai/moai/serve:v1.1.0-pt220-cu118

USER root

Expand Down
3 changes: 3 additions & 0 deletions moai/conf/engine/modules/rerun.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,6 @@ world_coordinates: RUF
add_floor: true
root: /
memory_limit: 75% #NOTE: can also be absolute, `4GB` for example
log:
handler: moai/logs # null
level: ${hydra:hydra_logging.root.level}
20 changes: 16 additions & 4 deletions moai/conf/examples/smplifyx/flows.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,16 +9,28 @@ _moai_:
_out_: [init_trans]
preprocess:
embedding:
void: [color]
void:
- ${mi:"color[0:1, 1:2, :, :]"}
_out_: [embedding]
translation:
void: [color]
void:
- ${mi:"sq(color, 0)"}
_out_: [translation]
betas:
void: [color]
void:
- ${mi:"flatten(color, 1)"}
_out_: [betas]
_mi_1:
expression:
- ${mi:"embedding + zeros(embedding)"}
- ${mi:"embedding_1 / ones(embedding)"}
- ${mi:"flatten(embedding_2, 1)"}
- ${mi:"sq(embedding_3, 0)"}
- ${mi:"view(embedding_4, 1, 32)"}
_out_:
[embedding_1, embedding_2, embedding_3, embedding_4, embedding_5]
vposer1: # vposer2:
decode: [embedding]
decode: [embedding_5] # [embedding]
_out_: [decoded]
smplx:
pose: [decoded.pose]
Expand Down
22 changes: 22 additions & 0 deletions moai/engine/modules/rerun.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
import logging
import typing

import colour
import numpy as np
import toolz
from omegaconf.omegaconf import DictConfig

from moai.utils.color.colormap import random_color
from moai.utils.funcs import get

try:
import rerun as rr
Expand All @@ -16,6 +19,15 @@
__all__ = ["Rerun"]


class RerunFilter(logging.Filter):
    """Logging filter that strips the ``:moai: `` prefix from record messages.

    Intended to be attached to the :mod:`rerun` ``LoggingHandler`` so that
    messages forwarded to the rerun viewer do not carry the moai emoji/tag
    prefix used elsewhere in the console output.
    """

    def __init__(self) -> None:
        # NOTE(review): the "rerun" name passed to logging.Filter is unused
        # because ``filter`` below does not call ``super().filter`` — it is
        # kept for readability/identification only.
        super().__init__("rerun")

    def filter(self, record: logging.LogRecord) -> logging.LogRecord:
        # ``record.msg`` may be any object (logging accepts non-str msgs);
        # only strip the prefix when it is actually a string to avoid an
        # AttributeError inside the logging machinery.
        if isinstance(record.msg, str):
            record.msg = record.msg.removeprefix(":moai: ")
        # Returning the (truthy) record keeps the record in the pipeline,
        # equivalent to returning True.
        return record


class Rerun:
r"""
This logger should support all the different types of rr logging.
Expand All @@ -39,6 +51,7 @@ def __init__(
add_floor: bool = True,
root: str = "/",
memory_limit: str = "75%",
log: DictConfig = {},
) -> None:
# NOTE: https://github.com/iterative/dvc/issues/9731
rr.init(name)
Expand All @@ -62,6 +75,15 @@ def __init__(
if add_floor:
self._create_floor(root)
# NOTE: add more global gizmos (e.g. tr-axis)
if handler_name := get(log, "handler"):
handler = rr.LoggingHandler(handler_name)
handler.addFilter(RerunFilter())
logging.getLogger().addHandler(handler)
if level := get(log, "level"):
if isinstance(level, str):
level = level.upper()
level = logging.getLevelName(level)
logging.getLogger().setLevel(level)

def _create_scalar_plots(self, root: str, plots) -> None:
for plot in plots:
Expand Down
5 changes: 4 additions & 1 deletion moai/engine/progressbar.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import rich.progress
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import RichProgressBar
from pytorch_lightning.callbacks.progress.rich_progress import RichProgressBarTheme

Expand All @@ -22,6 +23,8 @@
# NOTE: check https://github.com/Textualize/rich/discussions/482
# NOTE: check https://github.com/facebookresearch/EGG/blob/a139946a73d45553360a7f897626d1ae20759f12/egg/core/callbacks.py#L335
# NOTE: check https://github.com/Textualize/rich/discussions/921


class MoaiProgressBar(RichProgressBar):
def __init__(self) -> None:
super().__init__(
Expand All @@ -39,7 +42,7 @@ def __init__(self) -> None:
# CustomTimeColumn(style=self.theme.time),
# ProcessingSpeedColumn(style=self.theme.processing_speed),
# ]
def configure_columns(self, trainer: "pl.Trainer") -> list:
def configure_columns(self, trainer: Trainer) -> list:
original = super().configure_columns(trainer)
moai_column = rich.progress.TextColumn(":moai:")
spinner_column = rich.progress.SpinnerColumn(spinner_name="dots5")
Expand Down
9 changes: 4 additions & 5 deletions moai/serve/handlers/azure/blob.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,9 @@ def __call__(
) -> typing.Any:

if self.json_key not in json:
log.error(f"json key: {self.json_key}, not found in json request")
log.error(f"json key: {self.json_key}, not found in json request")
working_dir = json[self.json_key]


# initialize connection to Azure Blob Storage
connect_str = json[self.connection_string]
try:
Expand Down Expand Up @@ -103,7 +102,7 @@ def __init__(
# self.blob_service_client = BlobServiceClient.from_connection_string(
# connection_string,
# )

self.connection_string = connection_string
self.container_name = container_name
self.blob_paths = blob_paths
Expand All @@ -122,9 +121,9 @@ def __call__(
# NOTE: void is the input json response
# TODO: need to check batched inference
input_json = void[0].get("body") or void[0].get("raw")

if self.json_key not in input_json:
log.error(f"json key: {self.json_key}, not found in json request")
log.error(f"json key: {self.json_key}, not found in json request")
working_dir = input_json[self.json_key]

# initialize connection to Azure Blob Storage
Expand Down

0 comments on commit 22a981a

Please sign in to comment.