Commit e05aca9

updated modelbpa to use TOM
1 parent fb55954 commit e05aca9

39 files changed: +1810 -1482 lines changed

src/sempy_labs/__init__.py

Lines changed: 11 additions & 10 deletions
@@ -66,6 +66,7 @@
     resolve_report_name,
     # language_validate
 )
+
 # from sempy_labs._model_auto_build import model_auto_build
 from sempy_labs._model_bpa import model_bpa_rules, run_model_bpa
 from sempy_labs._model_dependencies import (
@@ -125,7 +126,7 @@
     #'list_sqlendpoints',
     #'list_tables',
     "list_warehouses",
-    'list_workspace_role_assignments',
+    "list_workspace_role_assignments",
     "create_warehouse",
     "update_item",
     "create_abfss_path",
@@ -141,20 +142,20 @@
     "resolve_report_id",
     "resolve_report_name",
     #'language_validate',
-    #"model_auto_build",
+    # "model_auto_build",
     "model_bpa_rules",
     "run_model_bpa",
     "measure_dependency_tree",
     "get_measure_dependencies",
     "get_model_calc_dependencies",
     "export_model_to_onelake",
-    'qso_sync',
-    'qso_sync_status',
-    'set_qso',
-    'list_qso_settings',
-    'disable_qso',
-    'set_semantic_model_storage_format',
-    'set_workspace_default_storage_format',
+    "qso_sync",
+    "qso_sync_status",
+    "set_qso",
+    "list_qso_settings",
+    "disable_qso",
+    "set_semantic_model_storage_format",
+    "set_workspace_default_storage_format",
     "refresh_semantic_model",
     "cancel_dataset_refresh",
     "translate_semantic_model",
@@ -174,5 +175,5 @@
     "delete_user_from_workspace",
     "update_workspace_user",
     "list_workspace_users",
-    "assign_workspace_to_dataflow_storage"
+    "assign_workspace_to_dataflow_storage",
 ]

src/sempy_labs/_ai.py

Lines changed: 26 additions & 11 deletions
@@ -79,7 +79,9 @@ def generate_measure_descriptions(
 
     validModels = ["gpt-35-turbo", "gpt-35-turbo-16k", "gpt-4"]
     if gpt_model not in validModels:
-        raise ValueError(f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}.")
+        raise ValueError(
+            f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}."
+        )
 
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
 
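As context for the guard above: gpt_model must be one of the three deployments in validModels. A hypothetical call that passes the check (the dataset and workspace names are placeholders):

```python
from sempy_labs._ai import generate_measure_descriptions

# "gpt-4" is in validModels, so the ValueError above is not raised.
generate_measure_descriptions(
    dataset="AdventureWorks",  # placeholder semantic model
    workspace="Sales",         # placeholder workspace
    gpt_model="gpt-4",
)
```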
@@ -114,8 +116,7 @@ def generate_measure_descriptions(
     )
 
     # Update the model to use the new descriptions
-    #with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:
-
+    # with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:
 
     # for t in m.Tables:
     #     tName = t.Name
@@ -171,33 +172,43 @@ def generate_aggs(
     numericTypes = ["Int64", "Double", "Decimal"]
 
     if any(value not in aggTypes for value in columns.values()):
-        raise ValueError(f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}.")
+        raise ValueError(
+            f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}."
+        )
 
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
     dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
-        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models.")
-
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models."
+        )
+
     dfC_filtT = dfC[dfC["Table Name"] == table_name]
 
     if len(dfC_filtT) == 0:
-        raise ValueError(f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace.")
+        raise ValueError(
+            f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace."
+        )
 
     dfC_filt = dfC[
         (dfC["Table Name"] == table_name) & (dfC["Column Name"].isin(columnValues))
     ]
 
     if len(columns) != len(dfC_filt):
-        raise ValueError(f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace.")
+        raise ValueError(
+            f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
+        )
 
     # Check if doing sum/count/min/max etc. on a non-number column
     for col, agg in columns.items():
         dfC_col = dfC_filt[dfC_filt["Column Name"] == col]
         dataType = dfC_col["Data Type"].iloc[0]
         if agg in aggTypesAggregate and dataType not in numericTypes:
-            raise ValueError(f"{icons.red_dot} The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types.")
+            raise ValueError(
+                f"{icons.red_dot} The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types."
+            )
 
     # Create/update lakehouse delta agg table
     aggSuffix = "_agg"
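For orientation, the checks above constrain the columns argument: every value must be a valid aggregation type, every key must be a column of table_name, and aggregate types such as Sum may only target numeric columns. A minimal sketch of a call shaped to pass them; the model, table, and column names are hypothetical, and "GroupBy" as a non-aggregate type is an assumption, since the full aggTypes list sits outside this hunk:

```python
from sempy_labs._ai import generate_aggs

# Hypothetical Direct Lake model; keys must exist in the table and values
# must be members of aggTypes, or the ValueErrors above are raised.
generate_aggs(
    dataset="AdventureWorks",
    table_name="FactInternetSales",
    columns={
        "SalesAmount": "Sum",       # Int64/Double/Decimal column: aggregate OK
        "OrderQuantity": "Sum",
        "OrderDateKey": "GroupBy",  # assumed non-aggregate type
    },
)
```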
@@ -213,7 +224,9 @@ def generate_aggs(
     dfI_filt = dfI[(dfI["Id"] == sqlEndpointId)]
 
     if len(dfI_filt) == 0:
-        raise ValueError(f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter.")
+        raise ValueError(
+            f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
+        )
 
     lakehouseName = dfI_filt["Display Name"].iloc[0]
     lakehouse_id = resolve_lakehouse_id(
@@ -328,7 +341,9 @@ def generate_aggs(
         col.DataType = System.Enum.Parse(TOM.DataType, dType)
 
         m.Tables[aggTableName].Columns.Add(col)
-        print(f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added.")
+        print(
+            f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added."
+        )
 
     # Create relationships
     relMap = {"m": "Many", "1": "One", "0": "None"}
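The commented-out connect_semantic_model block earlier in this file points at the TOM wrapper the commit message refers to ("updated modelbpa to use TOM"). A minimal sketch of that pattern, with the import path and the tom.model property assumed rather than confirmed by this diff:

```python
from sempy_labs.tom import connect_semantic_model  # assumed import path

# Assumed usage: open the model read-write; edits to TOM objects are saved
# when the context manager exits.
with connect_semantic_model(
    dataset="AdventureWorks", workspace="Sales", readonly=False
) as tom:
    for t in tom.model.Tables:
        for ms in t.Measures:
            if not ms.Description:
                ms.Description = f"Measure '{ms.Name}' on table '{t.Name}'."
```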

src/sempy_labs/_connections.py

Lines changed: 36 additions & 18 deletions
@@ -64,13 +64,19 @@ def create_connection_cloud(
         "Connection Id": o.get("id"),
         "Connection Name": o.get("name"),
         "Connectivity Type": o.get("connectivityType"),
-        "Connection Type": o.get("connectionDetails",{}).get("type"),
-        "Connection Path": o.get("connectionDetails",{}).get("path"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
         "Privacy Level": o.get("privacyLevel"),
-        "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
-        "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
-        "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
-        "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get(
+            "singleSignOnType"
+        ),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
     }
     df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
@@ -140,13 +146,19 @@ def create_connection_on_prem(
         "Connection Name": o.get("name"),
         "Gateway ID": o.get("gatewayId"),
         "Connectivity Type": o.get("connectivityType"),
-        "Connection Type": o.get("connectionDetails",{}).get("type"),
-        "Connection Path": o.get("connectionDetails",{}).get("path"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
         "Privacy Level": o.get("privacyLevel"),
-        "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
-        "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
-        "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
-        "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get(
+            "singleSignOnType"
+        ),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
     }
     df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
@@ -218,13 +230,19 @@ def create_connection_vnet(
         "Connection Name": o.get("name"),
         "Gateway ID": o.get("gatewayId"),
         "Connectivity Type": o.get("connectivityType"),
-        "Connection Type": o.get("connectionDetails",{}).get("type"),
-        "Connection Path": o.get("connectionDetails",{}).get("path"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
         "Privacy Level": o.get("privacyLevel"),
-        "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
-        "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
-        "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
-        "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get(
+            "singleSignOnType"
+        ),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
     }
     df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
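All three functions pick up the same two changes: black-style line wrapping, and a space after the comma in the dict.get fallback. The fallback itself is the important idiom: chaining dict.get with an empty-dict default lets a payload that lacks connectionDetails or credentialDetails degrade to None values instead of raising KeyError. A short illustration:

```python
# Response payload missing the optional sections entirely.
o = {"id": "abc123", "name": "my-connection"}

# o["connectionDetails"]["type"] would raise KeyError here; the empty-dict
# fallback makes the lookup degrade to None instead.
connection_type = o.get("connectionDetails", {}).get("type")
print(connection_type)  # None
```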
src/sempy_labs/_dax.py

Lines changed: 1 addition & 1 deletion
@@ -50,7 +50,7 @@ def evaluate_dax_impersonation(
 
     request_body = {
         "queries": [{"query": dax_query}],
-        "impersonatedUserName": user_name
+        "impersonatedUserName": user_name,
     }
 
     client = fabric.PowerBIRestClient()
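The trailing comma is cosmetic; the payload shape is what matters. For reference, a minimal sketch of posting such a body through the client created above. The route shown is the public Power BI executeQueries endpoint, and the IDs, DAX query, and user name are placeholders; that this function uses exactly this route is an assumption:

```python
import sempy.fabric as fabric

workspace_id = "00000000-0000-0000-0000-000000000000"  # placeholder
dataset_id = "11111111-1111-1111-1111-111111111111"    # placeholder

request_body = {
    "queries": [{"query": "EVALUATE VALUES('Date'[Year])"}],
    "impersonatedUserName": "user@contoso.com",  # identity to impersonate (RLS)
}

client = fabric.PowerBIRestClient()
response = client.post(
    f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/executeQueries",
    json=request_body,
)
print(response.status_code)
```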

src/sempy_labs/_generate_semantic_model.py

Lines changed: 6 additions & 2 deletions
@@ -37,7 +37,9 @@ def create_blank_semantic_model(
     min_compat = 1500
 
     if compatibility_level < min_compat:
-        raise ValueError(f"{icons.red_dot} Compatiblity level must be at least {min_compat}.")
+        raise ValueError(
+            f"{icons.red_dot} Compatiblity level must be at least {min_compat}."
+        )
 
     tmsl = f"""
     {{
@@ -90,7 +92,9 @@ def create_semantic_model_from_bim(
     dfI_filt = dfI[(dfI["Display Name"] == dataset)]
 
     if len(dfI_filt) > 0:
-        raise ValueError(f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace.")
+        raise ValueError(
+            f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace."
+        )
 
     client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
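For context, the min_compat guard in create_blank_semantic_model rejects compatibility levels below 1500 outright. A hypothetical call that passes it, assuming the function is re-exported at package level like the other helpers touched in this commit:

```python
from sempy_labs import create_blank_semantic_model  # assumed package-level export

# Any compatibility_level >= 1500 passes the guard; 1200 would raise ValueError.
create_blank_semantic_model(
    dataset="BlankModel",     # placeholder model name
    compatibility_level=1605,
    workspace="Sales",        # placeholder workspace
)
```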

src/sempy_labs/_helper_functions.py

Lines changed: 13 additions & 5 deletions
@@ -200,7 +200,9 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):
     return obj
 
 
-def resolve_lakehouse_name(lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None):
+def resolve_lakehouse_name(
+    lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None
+):
     """
     Obtains the name of the Fabric lakehouse.
 
223225
if workspace is None:
224226
workspace_id = fabric.get_workspace_id()
225227
workspace = fabric.resolve_workspace_name(workspace_id)
226-
228+
227229
if lakehouse_id is None:
228230
lakehouse_id = fabric.get_lakehouse_id()
229231

@@ -420,10 +422,14 @@ def save_as_delta_table(
420422
write_mode = write_mode.lower()
421423

422424
if write_mode not in writeModes:
423-
raise ValueError(f"{icons.red_dot} Invalid 'write_type' parameter. Choose from one of the following values: {writeModes}.")
425+
raise ValueError(
426+
f"{icons.red_dot} Invalid 'write_type' parameter. Choose from one of the following values: {writeModes}."
427+
)
424428

425429
if " " in delta_table_name:
426-
raise ValueError(f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names.")
430+
raise ValueError(
431+
f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names."
432+
)
427433

428434
dataframe.columns = dataframe.columns.str.replace(" ", "_")
429435

@@ -470,7 +476,9 @@ def language_validate(language: str):
     elif len(df_filt2) == 1:
         lang = df_filt2["Language"].iloc[0]
     else:
-        raise ValueError(f"{icons.red_dot} The '{language}' language is not a valid language code. Please refer to this link for a list of valid language codes: {url}.")
+        raise ValueError(
+            f"{icons.red_dot} The '{language}' language is not a valid language code. Please refer to this link for a list of valid language codes: {url}."
+        )
 
     return lang