Support pulling training dataset for model (#152)
* Support fetching F&E training dataset for seg models

* Add LegacyTrainingDataset API

* Remove immature test

* Empty-Commit

* Add test_dataset.py for manual testing
AsiaCao authored Oct 26, 2023
1 parent 145b3ab commit 391c991
Showing 7 changed files with 578 additions and 15 deletions.
16 changes: 9 additions & 7 deletions landingai/common.py
@@ -1,7 +1,7 @@
 import math
 import re
 from functools import cached_property
-from typing import Dict, List, Tuple
+from typing import Dict, List, Optional, Tuple

 import cv2
 import numpy as np
@@ -122,10 +122,7 @@ def decoded_boolean_mask(self) -> np.ndarray:
         1 means the pixel is the predicted class, 0 means the pixel is not.
         """
         flattened_bitmap = decode_bitmap_rle(self.encoded_mask, self.encoding_map)
-        seg_mask_channel = np.array(flattened_bitmap, dtype=np.uint8).reshape(
-            self.mask_shape
-        )
-        return seg_mask_channel
+        return np.array(flattened_bitmap, dtype=np.uint8).reshape(self.mask_shape)

     @cached_property
     def decoded_index_mask(self) -> np.ndarray:
@@ -189,7 +186,9 @@ class InferenceMetadata(BaseModel):
     )


-def decode_bitmap_rle(bitmap: str, encoding_map: Dict[str, int]) -> List[int]:
+def decode_bitmap_rle(
+    bitmap: str, encoding_map: Optional[Dict[str, int]] = None
+) -> List[int]:
     """
     Decode bitmap string to NumPy array.
@@ -198,12 +197,15 @@ def decode_bitmap_rle(bitmap: str, encoding_map: Dict[str, int]) -> List[int]:
     bitmap:
         Single run-length encoded bitmap string. For example: "5Z3N2Z".
     encoding_map:
-        Dictionary with the enconding used to generate the bitmap. For example: {'Z':0, 'N':1}.
+        Dictionary with the enconding used to generate the bitmap.
+        If none, {'Z':0, 'N':1} will be used.
     Return
     -----
     A flattened segmentation mask (with 0s and 1s) for a single class.
     """
+    if not encoding_map:
+        encoding_map = {"Z": 0, "N": 1}
     flat_mask = []
     bitmap_list = re.split("(Z|N)", bitmap)
     for num, map_letter in zip(*[iter(bitmap_list)] * 2):
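
For context, here is a minimal, self-contained sketch of the run-length decoding that the new default encoding map enables, based on the documented format ("5Z3N2Z" with {'Z': 0, 'N': 1}) and the snippet above. The loop body and the decode_rle name are illustrative and not copied from the library.

import re
from typing import Dict, List, Optional

import numpy as np


def decode_rle(bitmap: str, encoding_map: Optional[Dict[str, int]] = None) -> List[int]:
    # Default map introduced by this commit: 'Z' -> background (0), 'N' -> class (1).
    if not encoding_map:
        encoding_map = {"Z": 0, "N": 1}
    flat_mask: List[int] = []
    # The capturing group keeps the letters: "5Z3N2Z" -> ['5', 'Z', '3', 'N', '2', 'Z', ''].
    parts = re.split("(Z|N)", bitmap)
    for num, letter in zip(*[iter(parts)] * 2):
        flat_mask.extend([encoding_map[letter]] * int(num))
    return flat_mask


# Reshaping the flat mask mirrors what decoded_boolean_mask does with mask_shape.
mask = np.array(decode_rle("5Z3N2Z"), dtype=np.uint8).reshape((2, 5))
print(mask)  # [[0 0 0 0 0]
             #  [1 1 1 0 0]]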
33 changes: 30 additions & 3 deletions landingai/data_management/client.py
@@ -2,6 +2,7 @@
 import json
 import logging
 import posixpath
+from functools import lru_cache
 from importlib.metadata import version
 from typing import Any, Dict, Optional, Tuple, cast

@@ -14,13 +15,17 @@

 METADATA_ITEMS = "metadata_items"
 METADATA_UPDATE = "metadata_update"
+METADATA_GET = "metadata_get"
 MEDIA_LIST = "media_list"
 MEDIA_REGISTER = "media_register"
 MEDIA_SIGN = "media_sign"
+MEDIA_DETAILS = "media_details"
 MEDIA_UPDATE_SPLIT = "media_update_split"
 GET_PROJECT_SPLIT = "get_project_split"
 GET_PROJECT = "get_project"
 GET_DEFECTS = "get_defects"
+GET_PROJECT_MODEL_INFO = "get_project_model_info"
+GET_FAST_TRAINING_EXPORT = "get_fast_training_export"


 ROUTES = {
@@ -34,6 +39,16 @@
         "endpoint": "api/dataset/update_media_split",
         "method": requests.post,
     },
+    GET_PROJECT_MODEL_INFO: {
+        "root_url": "LANDING_API",
+        "endpoint": "api/registered_model/get_project_model_info",
+        "method": requests.get,
+    },
+    GET_FAST_TRAINING_EXPORT: {
+        "root_url": "LANDING_API",
+        "endpoint": "api/dataset/export/fast_training_export",
+        "method": requests.get,
+    },
     GET_DEFECTS: {
         "root_url": "LANDING_API",
         "endpoint": "api/defect/defects",
@@ -49,6 +64,11 @@
         "endpoint": "api/{version}/object/medias_metadata",
         "method": requests.post,
     },
+    METADATA_GET: {
+        "root_url": "LANDING_API",
+        "endpoint": "api/{version}/object/metadata",
+        "method": requests.get,
+    },
     MEDIA_REGISTER: {
         "root_url": "LANDING_API",
         "endpoint": "api/{version}/medias/new",
@@ -64,6 +84,11 @@
         "endpoint": "api/{version}/dataset/medias",
         "method": requests.get,
     },
+    MEDIA_DETAILS: {
+        "root_url": "LANDING_API",
+        "endpoint": "api/dataset/media_details",
+        "method": requests.get,
+    },
     GET_PROJECT: {
         "root_url": "LANDING_API",
         "endpoint": "api/{version}/project/with_users",
@@ -76,6 +101,7 @@
 }
 _API_VERSION = "v1"
 _LOGGER = logging.getLogger(__name__)
+_LRU_CACHE_SIZE = 1000


 # Backward incompatible changes compared to LandingLens CLI:
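
To illustrate how an entry in the ROUTES table above might be turned into a request URL, here is a hypothetical sketch. The client's real _api method is not shown in full in this diff, and the root URL value and the resolve_url helper are assumptions for illustration only.

import posixpath

import requests

# Assumed root URL, for illustration only; the real value comes from the client's configuration.
ROOT_URLS = {"LANDING_API": "https://app.landinglens.ai"}

route = {
    "root_url": "LANDING_API",
    "endpoint": "api/{version}/object/metadata",
    "method": requests.get,
}


def resolve_url(route: dict, version: str = "v1") -> str:
    # Substitute the API version into the endpoint template, then join with the root URL.
    endpoint = route["endpoint"].format(version=version)
    return posixpath.join(ROOT_URLS[route["root_url"]], endpoint)


print(resolve_url(route))        # https://app.landinglens.ai/api/v1/object/metadata
print(route["method"].__name__)  # get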
@@ -186,8 +212,7 @@ def _api(
             headers=headers,
             verify=True,
         )
-        _LOGGER.debug("Request URL: ", resp.request.url)
-        _LOGGER.debug("Request Body: ", resp.request.body)
+        _LOGGER.info(f"Request URL: {resp.request.url}")
         _LOGGER.debug("Response Code: ", resp.status_code)
         _LOGGER.debug("Response Reason: ", resp.reason)
         _LOGGER.debug("Response Content (500 chars): ", resp.content[:500])
@@ -197,7 +222,8 @@ def _api(
                 "message"
             ]
         except Exception as e:
-            error_message = e
+            _LOGGER.warning(f"Failed to parse error message into json: {e}")
+            error_message = resp.text
         raise HttpError(
             "HTTP request to LandingLens server failed with "
             f"code {resp.status_code}-{resp.reason} and error message: \n"
@@ -255,6 +281,7 @@ def get_project_property(
             raise HttpError(f"{property} Id not found")
         return property_value

+    @lru_cache(maxsize=_LRU_CACHE_SIZE)
     def get_metadata_mappings(
         self, project_id: int
     ) -> Tuple[Dict[str, Any], Dict[int, str]]:
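
Finally, the @lru_cache decorator memoizes get_metadata_mappings per set of arguments, so repeated lookups for the same project are served from an in-process cache instead of re-hitting the API. A small sketch of the effect, using a hypothetical module-level stand-in (the real method is an instance method, so the cache also keys on self, which keeps client instances alive for the lifetime of the cache):

from functools import lru_cache

_LRU_CACHE_SIZE = 1000


@lru_cache(maxsize=_LRU_CACHE_SIZE)
def get_mappings(project_id: int) -> dict:
    # Stand-in for the API round trip; only runs on a cache miss.
    print(f"fetching metadata mappings for project {project_id}")
    return {"field-id": "field-name"}  # placeholder payload


get_mappings(42)  # prints: performs the (simulated) fetch
get_mappings(42)  # silent: answered from the cache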