Skip to content

Commit

Permalink
Merge branch 'main' of github.com:hotosm/drone-tm into feat/task-stat…
Browse files Browse the repository at this point in the history
…s-endpoint
  • Loading branch information
Pradip-p committed Aug 8, 2024
2 parents f434de9 + e1d5d42 commit cdbd392
Show file tree
Hide file tree
Showing 24 changed files with 730 additions and 121 deletions.
7 changes: 3 additions & 4 deletions src/backend/app/db/db_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,14 +106,13 @@ class DbProject(Base):
organisation = relationship(DbOrganisation, backref="projects")

# flight params
# overlap_percent = cast(float, Column(Float, nullable=True))
front_overlap = cast(float, Column(Float, nullable=True))
side_overlap = cast(float, Column(Float, nullable=True))
gsd_cm_px = cast(float, Column(Float, nullable=True)) # in cm_px
altitude_from_ground = cast(float, Column(Float, nullable=True))
gsd_cm_px = cast(float, Column(Float, nullable=True))
camera_bearings = cast(list[int], Column(ARRAY(SmallInteger), nullable=True))
gimble_angles_degrees = cast(
list, Column(ARRAY(SmallInteger), nullable=True)
) # degrees
gimble_angles_degrees = cast(list, Column(ARRAY(SmallInteger), nullable=True))
is_terrain_follow = cast(bool, Column(Boolean, default=False))
dem_url = cast(str, Column(String, nullable=True))
hashtags = cast(list, Column(ARRAY(String))) # Project hashtag
Expand Down
31 changes: 21 additions & 10 deletions src/backend/app/migrations/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from app.db.db_models import Base
from geoalchemy2 import alembic_helpers
from alembic import context
from app.config import settings

Expand Down Expand Up @@ -38,14 +37,24 @@
# ... etc.


def include_object(object, name, type_, reflected, compare_to):
"""Ignore our excluded tables in the autogen sweep."""
if type_ == "table" and name in exclude_tables:
return False
# def include_object(object, name, type_, reflected, compare_to):
# """Ignore our excluded tables in the autogen sweep."""
# if type_ == "table" and name in exclude_tables:
# return False
# else:
# return alembic_helpers.include_object(
# object, name, type_, reflected, compare_to
# )


def include_name(name, type_, parent_names):
    """Alembic autogenerate filter: decide which schemas/tables to inspect.

    Only the default/"public" schema is considered, and only tables that
    are declared on our SQLAlchemy metadata; everything else (indexes,
    columns, constraints) is always included.
    """
    if type_ == "schema":
        # None is the connection's default schema.
        return name is None or name == "public"
    if type_ == "table":
        # Compare the schema-qualified name against declared model tables.
        return parent_names["schema_qualified_table_name"] in target_metadata.tables
    return True


def run_migrations_offline() -> None:
Expand All @@ -65,7 +74,8 @@ def run_migrations_offline() -> None:
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
include_object=include_object,
# include_object=include_object,
include_name=include_name,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
Expand Down Expand Up @@ -95,7 +105,8 @@ def run_migrations_online() -> None:
with connectable.connect() as connection:
context.configure(
connection=connection,
include_object=include_object,
# include_object=include_object,
include_name=include_name,
target_metadata=target_metadata,
)

Expand Down
33 changes: 33 additions & 0 deletions src/backend/app/migrations/versions/5d38e368b3d2_.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
"""
Revision ID: 5d38e368b3d2
Revises: d862bfa31c36
Create Date: 2024-08-06 07:22:37.970226
"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "5d38e368b3d2"
down_revision: Union[str, None] = "d862bfa31c36"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Schema upgrade: add the nullable ``altitude_from_ground`` float column.

    Auto-generated by Alembic for the ``projects`` table.
    """
    altitude_col = sa.Column("altitude_from_ground", sa.Float(), nullable=True)
    op.add_column("projects", altitude_col)


def downgrade() -> None:
    """Schema downgrade: drop the ``altitude_from_ground`` column again.

    Reverses :func:`upgrade` for the ``projects`` table.
    """
    op.drop_column("projects", "altitude_from_ground")
67 changes: 54 additions & 13 deletions src/backend/app/projects/project_crud.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,54 @@
from loguru import logger as log
import shapely.wkb as wkblib
from shapely.geometry import shape
from fastapi import HTTPException
from fastapi import HTTPException, UploadFile
from app.utils import merge_multipolygon
from fmtm_splitter.splitter import split_by_square
from fastapi.concurrency import run_in_threadpool
from databases import Database
from app.models.enums import ProjectStatus
from app.utils import generate_slug
from io import BytesIO
from app.s3 import add_obj_to_bucket
from app.config import settings


async def update_project_dem_url(db: Database, project_id: uuid.UUID, dem_url: str):
    """Persist the DEM URL on an existing project record.

    Args:
        db (Database): Async database connection.
        project_id (uuid.UUID): ID of the project to update.
        dem_url (str): S3 URL of the uploaded DEM file.

    Returns:
        bool: Always True once the UPDATE statement has been executed.
    """
    update_sql = """
        UPDATE projects
        SET dem_url = :dem_url
        WHERE id = :project_id
    """
    params = {"dem_url": dem_url, "project_id": project_id}
    await db.execute(update_sql, params)
    return True


async def upload_dem_to_s3(project_id: uuid.UUID, dem_file: UploadFile) -> str:
    """Upload a project's DEM raster to S3 and return its download URL.

    Args:
        project_id (uuid.UUID): ID of the project the DEM belongs to.
        dem_file (UploadFile): The DEM file (GeoTIFF) uploaded via FastAPI.

    Returns:
        str: The S3 download URL for the stored DEM file.
    """
    # Object key; the leading slash is stripped by add_obj_to_bucket before
    # upload, but kept here so the download URL below is well-formed.
    dem_path = f"/dem/{project_id}/dem.tif"

    # Buffer the whole upload in memory.
    # NOTE(review): assumes DEM files are small enough to hold in RAM —
    # confirm expected file sizes before relying on this.
    file_bytes = await dem_file.read()
    file_obj = BytesIO(file_bytes)

    add_obj_to_bucket(
        settings.S3_BUCKET_NAME,
        file_obj,
        dem_path,
        content_type=dem_file.content_type,
    )

    dem_url = f"{settings.S3_DOWNLOAD_ROOT}/{settings.S3_BUCKET_NAME}{dem_path}"

    return dem_url


async def create_project_with_project_info(
Expand All @@ -20,7 +61,9 @@ async def create_project_with_project_info(
_id = uuid.uuid4()
query = """
INSERT INTO projects (
id, slug, author_id, name, description, per_task_instructions, status, visibility, outline, no_fly_zones, dem_url, output_orthophoto_url, output_pointcloud_url, output_raw_url, task_split_dimension, deadline_at, final_output, requires_approval_from_manager_for_locking, front_overlap, side_overlap, created_at)
id, slug, author_id, name, description, per_task_instructions, status, visibility, outline, no_fly_zones,
gsd_cm_px, front_overlap, side_overlap, final_output ,altitude_from_ground,is_terrain_follow, task_split_dimension, deadline_at,
requires_approval_from_manager_for_locking, created_at)
VALUES (
:id,
:slug,
Expand All @@ -32,16 +75,15 @@ async def create_project_with_project_info(
:visibility,
:outline,
:no_fly_zones,
:dem_url,
:output_orthophoto_url,
:output_pointcloud_url,
:output_raw_url,
:gsd_cm_px,
:front_overlap,
:side_overlap,
:final_output,
:altitude_from_ground,
:is_terrain_follow,
:task_split_dimension,
:deadline_at,
:final_output,
:requires_approval_from_manager_for_locking,
:front_overlap,
:side_overlap,
CURRENT_TIMESTAMP
)
RETURNING id
Expand All @@ -59,13 +101,12 @@ async def create_project_with_project_info(
"status": ProjectStatus.DRAFT.name,
"visibility": project_metadata.visibility.name,
"outline": str(project_metadata.outline),
"gsd_cm_px": project_metadata.gsd_cm_px,
"altitude_from_ground": project_metadata.altitude_from_ground,
"is_terrain_follow": project_metadata.is_terrain_follow,
"no_fly_zones": str(project_metadata.no_fly_zones)
if project_metadata.no_fly_zones is not None
else None,
"dem_url": project_metadata.dem_url,
"output_orthophoto_url": project_metadata.output_orthophoto_url,
"output_pointcloud_url": project_metadata.output_pointcloud_url,
"output_raw_url": project_metadata.output_raw_url,
"task_split_dimension": project_metadata.task_split_dimension,
"deadline_at": project_metadata.deadline_at,
"final_output": [item.value for item in project_metadata.final_output],
Expand Down
9 changes: 9 additions & 0 deletions src/backend/app/projects/project_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from shapely.geometry import shape, mapping
from shapely.ops import unary_union


router = APIRouter(
prefix=f"{settings.API_PREFIX}/projects",
responses={404: {"description": "Not found"}},
Expand Down Expand Up @@ -70,6 +71,7 @@ async def delete_project_by_id(
@router.post("/create_project", tags=["Projects"])
async def create_project(
project_info: project_schemas.ProjectIn,
dem: UploadFile = File(None),
db: Database = Depends(database.get_db),
user_data: AuthUser = Depends(login_required),
):
Expand All @@ -78,6 +80,13 @@ async def create_project(
project_id = await project_crud.create_project_with_project_info(
db, author_id, project_info
)

# Upload DEM to S3
dem_url = await project_crud.upload_dem_to_s3(project_id, dem) if dem else None

# Update dem url to database
await project_crud.update_project_dem_url(db, project_id, dem_url)

if not project_id:
raise HTTPException(
status_code=HTTPStatus.BAD_REQUEST, detail="Project creation failed"
Expand Down
19 changes: 13 additions & 6 deletions src/backend/app/projects/project_schemas.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import uuid
from pydantic import BaseModel, computed_field, Field, validator
import json
from pydantic import BaseModel, computed_field, Field, validator, model_validator
from typing import Any, Optional, Union, List
from geojson_pydantic import Feature, FeatureCollection, Polygon
from app.models.enums import FinalOutput, ProjectVisibility, State
Expand Down Expand Up @@ -31,14 +32,13 @@ class ProjectIn(BaseModel):
description: str
per_task_instructions: Optional[str] = None
task_split_dimension: Optional[int] = None
dem_url: Optional[str] = None
gsd_cm_px: float = None
gsd_cm_px: Optional[float] = None
altitude_from_ground: Optional[float] = None
front_overlap: Optional[float] = None
side_overlap: Optional[float] = None
is_terrain_follow: bool = False
outline_no_fly_zones: Optional[Union[FeatureCollection, Feature, Polygon]] = None
outline_geojson: Union[FeatureCollection, Feature, Polygon]
output_orthophoto_url: Optional[str] = None
output_pointcloud_url: Optional[str] = None
output_raw_url: Optional[str] = None
deadline_at: Optional[date] = None
visibility: Optional[ProjectVisibility] = ProjectVisibility.PUBLIC
final_output: List[FinalOutput] = Field(
Expand Down Expand Up @@ -82,6 +82,13 @@ def centroid(self) -> Optional[Any]:
return None
return write_wkb(read_wkb(self.outline).centroid)

@model_validator(mode="before")
@classmethod
def validate_to_json(cls, value):
if isinstance(value, str):
return cls(**json.loads(value))
return value


class TaskOut(BaseModel):
"""Base project model."""
Expand Down
98 changes: 98 additions & 0 deletions src/backend/app/s3.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
from app.config import settings
from loguru import logger as log
from minio import Minio
from io import BytesIO
from typing import Any


def s3_client():
Expand Down Expand Up @@ -35,3 +37,99 @@ def is_connection_secure(minio_url: str):
raise ValueError(err)

return stripped_url, secure


def add_file_to_bucket(bucket_name: str, file_path: str, s3_path: str):
    """Upload a file from the filesystem to an S3 bucket.

    Args:
        bucket_name (str): The name of the S3 bucket.
        file_path (str): The path to the file on the local filesystem.
        s3_path (str): The path in the S3 bucket where the file will be stored.
    """
    # Strip any leading "/" so the object key matches what
    # add_obj_to_bucket produces for the same s3_path.
    s3_path = s3_path.lstrip("/")

    client = s3_client()
    # MinIO's fput_object signature is (bucket_name, object_name, file_path);
    # the previous call passed the local file path as the object name and the
    # S3 key as the local path, so uploads targeted the wrong object.
    client.fput_object(bucket_name, s3_path, file_path)


def add_obj_to_bucket(
    bucket_name: str,
    file_obj: BytesIO,
    s3_path: str,
    content_type: str = "application/octet-stream",
    **kwargs: dict[str, Any],
):
    """Upload a BytesIO object to an S3 bucket.

    Args:
        bucket_name (str): The name of the S3 bucket.
        file_obj (BytesIO): A BytesIO object containing the data to be uploaded.
        s3_path (str): The path in the S3 bucket where the data will be stored.
        content_type (str, optional): The content type of the uploaded file.
            Default application/octet-stream.
        kwargs (dict[str, Any]): Any other arguments to pass to client.put_object.
    """
    # Strip "/" from start of s3_path (not required by put_object)
    s3_path = s3_path.lstrip("/")

    client = s3_client()
    # Set BytesIO object to start, prior to .read()
    file_obj.seek(0)

    result = client.put_object(
        bucket_name,
        s3_path,
        file_obj,
        file_obj.getbuffer().nbytes,
        # Forward content_type: it was previously accepted but never passed
        # on, so every object was stored with the MinIO default content type.
        content_type=content_type,
        **kwargs,
    )
    log.debug(
        f"Created {result.object_name} object; etag: {result.etag}, "
        f"version-id: {result.version_id}"
    )


def get_file_from_bucket(bucket_name: str, s3_path: str, file_path: str):
    """Download an object from an S3 bucket and save it on the local filesystem.

    Args:
        bucket_name (str): The name of the S3 bucket.
        s3_path (str): The path to the file in the S3 bucket.
        file_path (str): Local filesystem path where the downloaded file
            will be saved.
    """
    # NOTE(review): this prefixes the key with "/" while add_obj_to_bucket
    # strips it — confirm both forms resolve to the same stored object key.
    object_key = s3_path if s3_path.startswith("/") else f"/{s3_path}"

    s3_client().fget_object(bucket_name, object_key, file_path)


def get_obj_from_bucket(bucket_name: str, s3_path: str) -> BytesIO:
    """Download an S3 object and return its content as an in-memory buffer.

    Args:
        bucket_name (str): The name of the S3 bucket.
        s3_path (str): The path to the S3 object in the bucket.

    Returns:
        BytesIO: Buffer holding the downloaded object's bytes.

    Raises:
        ValueError: If the download fails for any reason.
    """
    # NOTE(review): the key gets a "/" prefix here while add_obj_to_bucket
    # strips it on upload — confirm stored keys match this form.
    if not s3_path.startswith("/"):
        s3_path = f"/{s3_path}"

    response = None
    try:
        response = s3_client().get_object(bucket_name, s3_path)
        data = response.read()
    except Exception as err:
        log.warning(f"Failed attempted download from S3 path: {s3_path}")
        raise ValueError(str(err)) from err
    finally:
        # Release the HTTP connection whether or not the read succeeded.
        if response:
            response.close()
            response.release_conn()
    return BytesIO(data)
Loading

0 comments on commit cdbd392

Please sign in to comment.