Ruff
dogversioning committed Feb 13, 2024
1 parent 2dea9d6 commit 4dce974
Showing 22 changed files with 96 additions and 153 deletions.
24 changes: 9 additions & 15 deletions .github/workflows/ci.yaml
@@ -10,7 +10,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: '3.10'

- name: Install dependencies
run: |
@@ -23,23 +23,17 @@ jobs:
lint:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Install linters
run: |
python -m pip install --upgrade pip
pip install ".[dev]"
- name: Run pycodestyle
run: |
pycodestyle scripts src tests --max-line-length=88
- name: Run pylint
if: success() || failure() # still run pylint if above checks fail
run: |
pylint scripts src tests
- name: Run bandit
if: success() || failure() # still run bandit if above checks fail
run: |
bandit -r scripts src
- name: Run black
- name: Run ruff
if: success() || failure() # still run ruff if above checks fail
run: |
black --check --verbose .
ruff check
ruff format --check
23 changes: 8 additions & 15 deletions .pre-commit-config.yaml
@@ -1,17 +1,10 @@
default_install_hook_types: [pre-commit, pre-push]
repos:
- repo: https://github.com/psf/black
#this version is synced with the black mentioned in .github/workflows/ci.yml
rev: 22.12.0
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.1
hooks:
- id: black
entry: bash -c 'black "$@"; git add -u' --
# It is recommended to specify the latest version of Python
# supported by your project here, or alternatively use
# pre-commit's default_language_version, see
# https://pre-commit.com/#top_level-default_language_version
language_version: python3.9
- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort
args: ["--profile", "black", "--filter-files"]
- name: Ruff formatting
id: ruff-format
- name: Ruff linting
id: ruff
stages: [pre-push]
45 changes: 24 additions & 21 deletions pyproject.toml
@@ -1,15 +1,15 @@
[project]
name = "aggregator"
requires-python = ">= 3.9"
version = "0.1.3"
requires-python = ">= 3.10"
version = "0.3.0"
# This project is designed to run on the AWS serverless application framework (SAM).
# The project dependencies are handled via AWS layers. These are only required for
# local development.
dependencies= [
"arrow >=1.2.3",
"awswrangler >=2.19.0, <3",
"awswrangler >=3.5, <4",
"boto3",
"pandas >=1.5.0, <2"
"pandas >=2, <3"
]
authors = [
{ name="Matt Garber", email="matthew.garber@childrens.harvard.edu" },
@@ -45,23 +45,26 @@ test = [
"pytest-mock"
]
dev = [
"bandit",
"black==22.12.0",
"isort==5.12.0",
"ruff == 0.2.1",
"pre-commit",
"pylint",
"pycodestyle"
]
[tool.ruff]
target-version = "py310"

[tool.coverage.run]
command_line="-m pytest"
source=["./src/"]

[tool.coverage.report]
show_missing=true

[tool.isort]
profile = "black"
src_paths = ["src", "tests"]
skip_glob = [".aws_sam"]

[tool.ruff.lint]
select = [
"A", # prevent using keywords that clobber python builtins
"B", # bugbear: security warnings
"E", # pycodestyle
"F", # pyflakes
"I", # isort
"ISC", # implicit string concatenation
"PLE", # pylint errors
"RUF", # the ruff developer's own rules
"UP", # alert you when better syntax is available in your python version
]
ignore = [
# Recommended ignore from `ruff format` due to in-project conflicts with check.
# It's expected that this will be fixed in the coming months.
"ISC001"
]
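As a point of reference, here is a minimal, hypothetical sketch of the kind of code the selected rule families (A, B, UP) steer toward under `target-version = "py310"`; the function names and values are invented and nothing below is taken from the aggregator itself.

```python
from datetime import datetime, timezone


# UP (pyupgrade): with target-version = "py310", builtin generics and PEP 604
# unions (list[...], X | None) replace typing.List / typing.Optional.
def last_upload(
    times: list[datetime], default: datetime | None = None
) -> datetime | None:
    return max(times) if times else default


# B (bugbear): a mutable default argument is shared across calls, so None is
# the sentinel and the list is built inside the function.
def collect(item: str, bucket: list[str] | None = None) -> list[str]:
    bucket = [] if bucket is None else bucket
    bucket.append(item)
    return bucket


# A (builtins): a variable named `id` or `list` would shadow the builtin, so
# a more specific name keeps the builtin callable.
record_id = "site-123"

if __name__ == "__main__":
    print(last_upload([datetime.now(timezone.utc)]))
    print(collect("cnt"))
    print(record_id)
```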
2 changes: 1 addition & 1 deletion scripts/cumulus_upload_data.py
@@ -107,7 +107,7 @@ def upload_file(cli_args):
if args["test"]:
args_dict["user"] = os.environ.get("CUMULUS_TEST_UPLOAD_USER", "general")
args_dict["file"] = (
f"{str(Path(__file__).resolve().parents[1])}"
f"{Path(__file__).resolve().parents[1]!s}"
f"/tests/test_data/count_synthea_patient.parquet"
)
args_dict["auth"] = os.environ.get("CUMULUS_TEST_UPLOAD_AUTH", "secretval")
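The `!s` change above applies the f-string conversion flag that the newly enabled RUF rules prefer over an explicit `str()` call inside a placeholder (RUF010); a self-contained sketch with a made-up path shows the spellings render identically.

```python
from pathlib import Path

# Hypothetical path, purely for illustration.
root = Path("/tmp/aggregator")

# All three spellings produce the same text; !s (or no conversion at all)
# avoids the redundant str() call inside the placeholder.
assert f"{str(root)}" == f"{root!s}" == f"{root}"
print(f"{root!s}/tests/test_data")
```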
3 changes: 1 addition & 2 deletions src/handlers/dashboard/filter_config.py
@@ -74,8 +74,7 @@ def _parse_filter_req(filter_req):
return " AND ".join(_parse_filter_req(x) for x in filter_req.split(","))
filter_req_split = filter_req.split(":")
if (
filter_req_split[1]
in _FILTER_MAP_ONE_PARAM.keys() # pylint: disable=consider-iterating-dictionary
filter_req_split[1] in _FILTER_MAP_ONE_PARAM.keys() # pylint: disable=consider-iterating-dictionary
):
return _FILTER_MAP_ONE_PARAM[filter_req_split[1]] % filter_req_split[0]
return _FILTER_MAP_TWO_PARAM[filter_req_split[1]] % (
7 changes: 4 additions & 3 deletions src/handlers/dashboard/get_chart_data.py
@@ -13,7 +13,7 @@
from ..shared.functions import get_latest_data_package_version, http_response


def _get_table_cols(table_name: str, version: str = None) -> list:
def _get_table_cols(table_name: str, version: str | None = None) -> list:
"""Returns the columns associated with a table.
Since running an athena query takes a decent amount of time due to queueing
@@ -29,7 +29,8 @@ def _get_table_cols(table_name: str, version: str = None) -> list:
s3_key = f"{prefix}/{version}/{table_name}__aggregate.csv"
s3_client = boto3.client("s3")
s3_iter = s3_client.get_object(
Bucket=s3_bucket_name, Key=s3_key # type: ignore[arg-type]
Bucket=s3_bucket_name,
Key=s3_key, # type: ignore[arg-type]
)["Body"].iter_lines()
return next(s3_iter).decode().split(",")

@@ -41,7 +42,7 @@ def _build_query(query_params: dict, filters: list, path_params: dict) -> str:
filter_str = get_filter_string(filters)
if filter_str != "":
filter_str = f"AND {filter_str}"
count_col = [c for c in columns if c.startswith("cnt")][0]
count_col = next(c for c in columns if c.startswith("cnt"))
columns.remove(count_col)
select_str = f"{query_params['column']}, sum({count_col}) as {count_col}"
group_str = f"{query_params['column']}"
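The `cnt` column lookup above now takes the first match with `next()` rather than building a full list and indexing into it; a standalone sketch of the idiom, using invented column names, looks like this.

```python
# Invented column names, for illustration only.
columns = ["cnt_patient", "gender", "age"]

# next() stops at the first matching column instead of materializing every
# match the way a list comprehension plus [0] would.
count_col = next(c for c in columns if c.startswith("cnt"))

# With no match, bare next() raises StopIteration; supplying a default turns
# that into a sentinel value instead.
maybe_count_col = next((c for c in columns if c.startswith("cnt")), None)

print(count_col, maybe_count_col)
```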
3 changes: 1 addition & 2 deletions src/handlers/shared/functions.py
@@ -3,7 +3,6 @@
import json
import logging
from datetime import datetime, timezone
from typing import Optional

import boto3

@@ -77,7 +76,7 @@ def update_metadata(
data_package: str,
version: str,
target: str,
dt: Optional[datetime] = None,
dt: datetime | None = None,
meta_type: str = JsonFilename.TRANSACTIONS.value,
):
"""Safely updates items in metadata dictionary
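Dropping the `typing.Optional` import in favor of `datetime | None` relies on PEP 604 union syntax, which is part of why these annotation changes travel with the `requires-python = ">= 3.10"` bump; a tiny sketch with a hypothetical helper (not the real `update_metadata`) illustrates the behavior.

```python
from datetime import datetime, timezone


# Hypothetical helper: the PEP 604 annotation is evaluated when the function
# is defined, so without `from __future__ import annotations` this module
# needs Python 3.10+ just to import.
def stamp(dt: datetime | None = None) -> str:
    dt = dt if dt is not None else datetime.now(timezone.utc)
    return dt.isoformat()


print(stamp())
print(stamp(datetime(2024, 2, 13, tzinfo=timezone.utc)))
```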
13 changes: 6 additions & 7 deletions src/handlers/site_upload/api_gateway_authorizer.py
@@ -3,7 +3,6 @@
"""

# pylint: disable=invalid-name,pointless-string-statement
from __future__ import print_function

import os
import re
@@ -28,7 +27,7 @@ def lambda_handler(event, context):
if auth_token not in user_db.keys() or auth_header[0] != "Basic":
raise AuthError
except (AuthError, KeyError):
raise AuthError(event) # pylint: disable=raise-missing-from
raise AuthError(event) # noqa: B904

principalId = user_db[auth_token]["site"]

@@ -66,7 +65,7 @@ class HttpVerb:
ALL = "*"


class AuthPolicy(object): # pylint: disable=missing-class-docstring; # pragma: no cover
class AuthPolicy: # pylint: disable=missing-class-docstring; # pragma: no cover
awsAccountId = ""
"""The AWS account id the policy will be generated for. This is used to
create the method ARNs."""
@@ -81,8 +80,8 @@ class AuthPolicy(object): # pylint: disable=missing-class-docstring; # pragma:
conditions statement.
the build method processes these lists and generates the appropriate
statements for the final policy"""
allowMethods = []
denyMethods = []
allowMethods = [] # noqa: RUF012
denyMethods = [] # noqa: RUF012

restApiId = "<<restApiId>>"
""" Replace the placeholder value with a default API Gateway API id to be used in
@@ -211,15 +210,15 @@ def allowMethodWithConditions(self, verb, resource, conditions):
methods and includes a condition for the policy statement. More on AWS policy
conditions here:
http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition
""" # noqa: E501
"""
self._addMethod("Allow", verb, resource, conditions)

def denyMethodWithConditions(self, verb, resource, conditions):
"""Adds an API Gateway method (Http verb + Resource path) to the list of denied
methods and includes a condition for the policy statement. More on AWS policy
conditions here:
http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition
""" # noqa: E501
"""
self._addMethod("Deny", verb, resource, conditions)

def build(self):
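The `# noqa: RUF012` markers above suppress Ruff's mutable-class-attribute warning rather than restructure the legacy authorizer; for reference, here is a minimal sketch of the two usual remedies, with names that do not mirror the real `AuthPolicy` internals.

```python
from typing import ClassVar


class PolicyLike:
    # State that is intentionally shared at class level is annotated with
    # ClassVar so the sharing reads as deliberate.
    registered_verbs: ClassVar[list[str]] = []

    def __init__(self) -> None:
        # Per-instance state gets a fresh list in __init__ instead of a
        # mutable class attribute shared by every instance.
        self.allow_methods: list[str] = []


a, b = PolicyLike(), PolicyLike()
a.allow_methods.append("GET")
print(a.allow_methods, b.allow_methods)  # ['GET'] []
```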
16 changes: 12 additions & 4 deletions src/handlers/site_upload/powerset_merge.py
@@ -189,7 +189,7 @@ def expand_and_concat_sets(
.reset_index()
# this last line makes "cnt" the first column in the set, matching the
# library style
.filter(["cnt"] + data_cols)
.filter(["cnt", *data_cols])
)
return agg_df

@@ -209,8 +209,9 @@ def generate_csv_from_parquet(bucket_name: str, bucket_root: str, subbucket_path
awswrangler.s3.to_csv(
last_valid_df,
(
f"s3://{bucket_name}/{bucket_root}/"
f"{subbucket_path}".replace(".parquet", ".csv")
f"s3://{bucket_name}/{bucket_root}/{subbucket_path}".replace(
".parquet", ".csv"
)
),
index=False,
quoting=csv.QUOTE_NONE,
@@ -250,7 +251,6 @@ def merge_powersets(manager: S3Manager) -> None:
e,
)
for latest_path in latest_file_list:

if manager.version not in latest_path:
continue
site_specific_name = get_s3_site_filename_suffix(latest_path)
@@ -315,6 +315,14 @@ def merge_powersets(manager: S3Manager) -> None:
manager.site,
)
manager.update_local_metadata(TransactionKeys.LAST_AGGREGATION.value)

if df.empty:
manager.merge_error_handler(
latest_path,
subbucket_path,
OSError("File not found"),
)

manager.write_local_metadata()

# In this section, we are trying to accomplish two things:
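The `.filter(["cnt", *data_cols])` change swaps list concatenation for in-place unpacking, the form Ruff's RUF005 rule prefers; both spellings build the same list, as this sketch with invented column names shows.

```python
# Invented column names, for illustration.
data_cols = ["site", "gender", "age"]

old_style = ["cnt"] + data_cols   # concatenation of a literal list
new_style = ["cnt", *data_cols]   # unpacking, as RUF005 suggests

assert old_style == new_style
print(new_style)
```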
20 changes: 10 additions & 10 deletions template.yaml
@@ -60,7 +60,7 @@ Resources:
Properties:
FunctionName: !Sub 'CumulusAggFetchAuthorizer-${DeployStage}'
Handler: src/handlers/site_upload/api_gateway_authorizer.lambda_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 128
Timeout: 100
Description: Validates credentials before providing signed urls
@@ -83,7 +83,7 @@ Resources:
Properties:
FunctionName: !Sub 'CumulusAggFetchUploadUrl-${DeployStage}'
Handler: src/handlers/site_upload/fetch_upload_url.upload_url_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 128
Timeout: 100
Description: Generates a presigned URL for uploading files to S3
@@ -113,7 +113,7 @@ Resources:
Properties:
FunctionName: !Sub 'CumulusAggProcessUpload-${DeployStage}'
Handler: src/handlers/site_upload/process_upload.process_upload_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 128
Timeout: 800
Description: Handles initial relocation of upload data
@@ -142,7 +142,7 @@ Resources:
FunctionName: !Sub 'CumulusAggPowersetMerge-${DeployStage}'
Layers: [arn:aws:lambda:us-east-1:336392948345:layer:AWSSDKPandas-Python39:1]
Handler: src/handlers/site_upload/powerset_merge.powerset_merge_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 8192
Timeout: 800
Description: Merges and aggregates powerset count data
@@ -173,7 +173,7 @@ Resources:
FunctionName: !Sub 'CumulusAggStudyPeriod-${DeployStage}'
Layers: [arn:aws:lambda:us-east-1:336392948345:layer:AWSSDKPandas-Python39:1]
Handler: src/handlers/site_upload/study_period.study_period_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 512
Timeout: 800
Description: Handles metadata outside of upload/processing for studies
@@ -201,7 +201,7 @@ Resources:
FunctionName: !Sub 'CumulusAggCacheAPI-${DeployStage}'
Layers: [arn:aws:lambda:us-east-1:336392948345:layer:AWSSDKPandas-Python39:1]
Handler: src/handlers/site_upload/cache_api.cache_api_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 512
Timeout: 800
Description: Caches selected database queries to S3
@@ -249,7 +249,7 @@ Resources:
FunctionName: !Sub 'CumulusAggDashboardGetChartData-${DeployStage}'
Layers: [arn:aws:lambda:us-east-1:336392948345:layer:AWSSDKPandas-Python39:1]
Handler: src/handlers/dashboard/get_chart_data.chart_data_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 2048
Timeout: 100
Description: Retrieve data for chart display in Cumulus Dashboard
@@ -296,7 +296,7 @@ Resources:
Properties:
FunctionName: !Sub 'CumulusAggDashboardGetMetadata-${DeployStage}'
Handler: src/handlers/dashboard/get_metadata.metadata_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 128
Timeout: 100
Description: Retrieve data about site uploads
@@ -344,7 +344,7 @@ Resources:
FunctionName: !Sub 'CumulusAggDashboardDataPackages-${DeployStage}'
Layers: [arn:aws:lambda:us-east-1:336392948345:layer:AWSSDKPandas-Python39:1]
Handler: src/handlers/dashboard/get_data_packages.data_packages_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 512
Timeout: 100
Description: Retrieve data for chart display in Cumulus Dashboard
@@ -400,7 +400,7 @@ Resources:
Properties:
FunctionName: !Sub 'CumulusAggDashboardStudyPeriods-${DeployStage}'
Handler: src/handlers/dashboard/get_study_periods.study_periods_handler
Runtime: python3.9
Runtime: python3.10
MemorySize: 128
Timeout: 100
Description: Retrieve data about the study period
2 changes: 1 addition & 1 deletion tests/conftest.py
@@ -27,8 +27,8 @@
import boto3
import pytest
from moto import mock_athena, mock_s3, mock_sns
from scripts.credential_management import create_auth, create_meta

from scripts.credential_management import create_auth, create_meta
from src.handlers.shared.enums import BucketPath, JsonFilename
from src.handlers.shared.functions import write_metadata
from tests.utils import (