feat: remove unnecessary settings
(cherry picked from commit a5aa990)
Ian2012 committed Apr 11, 2024 · 1 parent 30722b5 · commit 7e170ce
Showing 22 changed files with 71 additions and 169 deletions.
tutoraspects/patches/k8s-deployments: 10 changes (0 additions & 10 deletions)
@@ -112,16 +112,6 @@ spec:
value: "1m"
- name: RALPH_RUNSERVER_PORT
value: "{{RALPH_PORT}}"
- name: RALPH_SENTRY_DSN
value: "{{RALPH_SENTRY_DSN}}"
- name: RALPH_EXECUTION_ENVIRONMENT
value: "{{RALPH_EXECUTION_ENVIRONMENT}}"
- name: RALPH_SENTRY_CLI_TRACES_SAMPLE_RATE
value: "{{RALPH_SENTRY_CLI_TRACES_SAMPLE_RATE}}"
- name: RALPH_SENTRY_LRS_TRACES_SAMPLE_RATE
value: "{{RALPH_SENTRY_LRS_TRACES_SAMPLE_RATE}}"
- name: RALPH_SENTRY_IGNORE_HEALTH_CHECKS
value: "{{RALPH_SENTRY_IGNORE_HEALTH_CHECKS}}"
{% if RALPH_EXTRA_SETTINGS %}
{% for key, value in RALPH_EXTRA_SETTINGS.items() %}
- name: {{key}}
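The Sentry-specific variables removed above are no longer rendered into the Ralph deployment, but the RALPH_EXTRA_SETTINGS loop that follows them is kept, so any of these values can still be injected as ordinary environment variables. A minimal sketch of a Tutor config.yml fragment, assuming Sentry should be re-enabled; the DSN and values are placeholders:

# Hypothetical config.yml fragment; each key/value pair below is rendered
# into a name/value entry of the container spec by the retained loop.
RALPH_EXTRA_SETTINGS:
  RALPH_SENTRY_DSN: "https://public-key@sentry.example.invalid/1"
  RALPH_EXECUTION_ENVIRONMENT: "production"
  RALPH_SENTRY_LRS_TRACES_SAMPLE_RATE: "0.1"
  RALPH_SENTRY_IGNORE_HEALTH_CHECKS: "true"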
tutoraspects/patches/k8s-jobs: 2 changes (0 additions & 2 deletions)
@@ -16,8 +16,6 @@ spec:
value: /opt/venv
- name: XAPI_SCHEMA
value: {{ ASPECTS_XAPI_DATABASE }}
- name: ASPECTS_ENROLLMENT_EVENTS_TABLE
value: {{ ASPECTS_ENROLLMENT_EVENTS_TABLE }}
- name: DBT_STATE
value: {{ DBT_STATE_DIR }}
image: {{ DOCKER_IMAGE_ASPECTS }}
tutoraspects/patches/local-docker-compose-jobs-services: 1 change (0 additions & 1 deletion)
@@ -3,7 +3,6 @@ aspects-job:
image: {{ DOCKER_IMAGE_ASPECTS }}
environment:
- XAPI_SCHEMA={{ ASPECTS_XAPI_DATABASE }}
- ASPECTS_ENROLLMENT_EVENTS_TABLE={{ ASPECTS_ENROLLMENT_EVENTS_TABLE }}
volumes:
- ../../env/plugins/aspects/apps/aspects:/app/aspects
- ../../env/plugins/aspects/apps/aspects/scripts/:/app/aspects/scripts:ro
tutoraspects/patches/openedx-common-settings: 1 change (0 additions & 1 deletion)
@@ -14,7 +14,6 @@ SUPERSET_CONFIG = {
EVENT_SINK_CLICKHOUSE_PII_MODELS = {{ EVENT_SINK_PII_MODELS }}

ASPECTS_INSTRUCTOR_DASHBOARDS = {{ ASPECTS_INSTRUCTOR_DASHBOARDS }}
SUPERSET_EXTRA_FILTERS_FORMAT = {{ ASPECTS_SUPERSET_EXTRA_FILTERS_FORMAT }}
SUPERSET_DASHBOARD_LOCALES = {{ SUPERSET_DASHBOARD_LOCALES }}
{% if ASPECTS_ENABLE_INSTRUCTOR_DASHBOARD_PLUGIN %}
try:
tutoraspects/plugin.py: 89 changes (2 additions & 87 deletions)
@@ -107,37 +107,12 @@
"individual-learner": "abae8a25-1ba4-4653-81bd-d3937a162a11",
},
),
("ASPECTS_SUPERSET_EXTRA_FILTERS_FORMAT", []),
# ClickHouse xAPI settings
("ASPECTS_XAPI_DATABASE", "xapi"),
("ASPECTS_RAW_XAPI_TABLE", "xapi_events_all"),
("ASPECTS_XAPI_TRANSFORM_MV", "xapi_events_all_parsed_mv"),
("ASPECTS_XAPI_TABLE", "xapi_events_all_parsed"),
# ClickHouse top-level materialized views
("ASPECTS_ENROLLMENT_TRANSFORM_MV", "enrollment_events_mv"),
("ASPECTS_ENROLLMENT_EVENTS_TABLE", "enrollment_events"),
("ASPECTS_VIDEO_PLAYBACK_TRANSFORM_MV", "video_playback_events_mv"),
("ASPECTS_VIDEO_PLAYBACK_EVENTS_TABLE", "video_playback_events"),
("ASPECTS_PROBLEM_TRANSFORM_MV", "problem_events_mv"),
("ASPECTS_PROBLEM_EVENTS_TABLE", "problem_events"),
("ASPECTS_NAVIGATION_TRANSFORM_MV", "navigation_events_mv"),
("ASPECTS_NAVIGATION_EVENTS_TABLE", "navigation_events"),
("ASPECTS_GRADING_TRANSFORM_MV", "grading_events_mv"),
("ASPECTS_GRADING_EVENTS_TABLE", "grading_events"),
("ASPECTS_FORUM_TRANSFORM_MV", "forum_events_mv"),
("ASPECTS_FORUM_EVENTS_TABLE", "forum_events"),
("ASPECTS_COMPLETION_EVENTS_TABLE", "completion_events"),
("ASPECTS_COMPLETION_TRANSFORM_MV", "completion_events_mv"),
# ClickHouse event sink settings
("ASPECTS_EVENT_SINK_DATABASE", "event_sink"),
("ASPECTS_EVENT_SINK_NODES_TABLE", "course_blocks"),
("ASPECTS_EVENT_SINK_RELATIONSHIPS_TABLE", "course_relationships"),
("ASPECTS_EVENT_SINK_OVERVIEWS_TABLE", "course_overviews"),
("ASPECTS_EVENT_SINK_USER_PROFILE_TABLE", "user_profile"),
("ASPECTS_EVENT_SINK_EXTERNAL_ID_TABLE", "external_id"),
("ASPECTS_EVENT_SINK_CLICKHOUSE_TIMEOUT_SECS", "5"),
("ASPECTS_EVENT_SINK_RECENT_BLOCKS_TABLE", "most_recent_course_blocks"),
("ASPECTS_EVENT_SINK_RECENT_BLOCKS_MV", "most_recent_course_blocks_mv"),
("ASPECTS_EVENT_SINK_CLICKHOUSE_TIMEOUT_SECS", 5),
# Vector settings
("ASPECTS_DOCKER_HOST_SOCK_PATH", "/var/run/docker.sock"),
("ASPECTS_VECTOR_STORE_TRACKING_LOGS", False),
@@ -239,11 +214,6 @@
("RALPH_PORT", "8100"),
("RALPH_ENABLE_PUBLIC_URL", False),
("RALPH_RUN_HTTPS", False),
("RALPH_SENTRY_DSN", ""),
("RALPH_EXECUTION_ENVIRONMENT", "development"),
("RALPH_SENTRY_CLI_TRACES_SAMPLE_RATE", 1.0),
("RALPH_SENTRY_LRS_TRACES_SAMPLE_RATE", 0.1),
("RALPH_SENTRY_IGNORE_HEALTH_CHECKS", True),
("RALPH_EXTRA_SETTINGS", {}),
######################
# Superset Settings
@@ -276,7 +246,6 @@
("SUPERSET_LMS_EMAIL", "superset/lms-admin@aspects.invalid"),
("SUPERSET_OWNERS", []),
# Set to 0 to have no row limit.
("SUPERSET_ROW_LIMIT", 100_000),
(
"SUPERSET_METADATA_SQLALCHEMY_URI",
"mysql://{{SUPERSET_DB_USERNAME}}:{{SUPERSET_DB_PASSWORD}}"
@@ -289,27 +258,8 @@
"Superset Metadata": "{{SUPERSET_METADATA_SQLALCHEMY_URI}}",
},
),
("SUPERSET_SENTRY_DSN", ""),
(
"SUPERSET_TALISMAN_CONFIG",
{
"content_security_policy": {
"default-src": ["'self'", "'unsafe-inline'", "'unsafe-eval'"],
"img-src": ["'self'", "data:"],
"worker-src": ["'self'", "blob:"],
"connect-src": [
"'self'",
"https://api.mapbox.com",
"https://events.mapbox.com",
],
"object-src": "'none'",
}
},
),
("SUPERSET_TALISMAN_ENABLED", True),
# These are languages that Superset itself supports, it does not currently
# support different locales for a language.
("SUPERSET_DEFAULT_LOCALE", "en"),
(
"SUPERSET_SUPPORTED_LANGUAGES",
{
@@ -362,10 +312,6 @@
# short since mostly most of the savings comes from the
# course cache anyway.
("SUPERSET_USER_PERMISSIONS_CACHE_TIMEOUT", 120),
# This controls the cache time of the user's course list
# only, limiting the number of LMS calls since they are
# rate limited. This can be cleared by logging back in.
("SUPERSET_USER_COURSES_CACHE_TIMEOUT", 300),
("SUPERSET_BLOCK_STUDENT_ACCESS", True),
# This setting allows Superset to run behind a reverse proxy in HTTPS and
# redirect to the correct http/s based on the headers sent from the proxy.
@@ -376,6 +322,7 @@
# in the Superset database. This is useful for keeping the columns up to
# date with the latest changes in DBT.
("SUPERSET_REFRESH_DATASETS", False),
("SUPERSET_SENTRY_DSN", ""),
######################
# dbt Settings
# For the most part you shouldn't have to touch these
@@ -386,40 +333,8 @@
("DBT_BRANCH", "v3.12.0"),
("DBT_SSH_KEY", ""),
("DBT_STATE_DIR", "/app/aspects/dbt_state/"),
# This is a pip compliant list of Python packages to install to run dbt
# make sure packages with versions are enclosed in double quotes
("EXTRA_DBT_PACKAGES", []),
# This is the name of the database dbt will write to
("DBT_PROFILE_TARGET_DATABASE", "reporting"),
# Validate TLS certificate if using TLS/SSL
("DBT_PROFILE_VERIFY", "True"),
# Use TLS (native protocol) or HTTPS (http protocol)
("DBT_PROFILE_SECURE", "{{ CLICKHOUSE_SECURE_CONNECTION }}"),
# Number of times to retry a "retryable" database exception (such as a 503
# 'Service Unavailable' error)
("DBT_PROFILE_RETRIES", "3"),
# Use gzip compression if truthy (http), or compression type for a native
# connection
("DBT_PROFILE_COMPRESSION", "lz4"),
# Timeout in seconds to establish a connection to ClickHouse
("DBT_PROFILE_CONNECT_TIMEOUT", "10"),
# Timeout in seconds to receive data from the ClickHouse server
("DBT_PROFILE_SEND_RECEIVE_TIMEOUT", "300"),
# Use specific settings designed to improve operation on replicated databases
# (recommended for ClickHouse Cloud)
("DBT_PROFILE_CLUSTER_MODE", "False"),
# Use the experimental `delete+insert` as the default incremental strategy.
("DBT_PROFILE_USE_LW_DELETES", "False"),
# Validate that clickhouse support the atomic EXCHANGE TABLES command. (Not
# needed for most ClickHouse versions)
("DBT_PROFILE_CHECK_EXCHANGE", "False"),
# A dictionary/mapping of custom ClickHouse settings for the connection -
# default is empty.
("DBT_PROFILE_CUSTOM_SETTINGS", ""),
# Timeout for server ping
("DBT_PROFILE_SYNC_REQUEST_TIMEOUT", "5"),
# Compression block size if compression is enabled, this is the default value
("DBT_PROFILE_COMPRESS_BLOCK_SIZE", "1048576"),
]
)
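After this change, only the settings still listed in plugin.py are consumed by the Aspects templates; overriding one of the removed keys in config.yml would generally have no effect. A sketch of overriding a few settings the commit keeps, with assumed, purely illustrative values:

# Hypothetical config.yml overrides for settings still defined in plugin.py;
# the branch name and package pin are placeholders, not project defaults.
DBT_BRANCH: "my-dbt-project-fork"
EXTRA_DBT_PACKAGES:
  - "some-extra-package==1.0.0"
SUPERSET_REFRESH_DATASETS: true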

tutoraspects/templates/aspects/apps/aspects/dbt/profiles.yml: 24 changes (12 additions & 12 deletions)
@@ -11,15 +11,15 @@ aspects: # this needs to match the profile in your dbt_project.yml file
cluster: {{ CLICKHOUSE_CLUSTER_NAME }}

# These are ClickHouse provider values and map directly to ClickHouse connection settings.
verify: {{ DBT_PROFILE_VERIFY }}
secure: {{ DBT_PROFILE_SECURE }}
retries: {{ DBT_PROFILE_RETRIES }}
compression: {{ DBT_PROFILE_COMPRESSION }}
connect_timeout: {{ DBT_PROFILE_CONNECT_TIMEOUT }}
send_receive_timeout: {{ DBT_PROFILE_SEND_RECEIVE_TIMEOUT }}
cluster_mode: {{ DBT_PROFILE_CLUSTER_MODE }}
use_lw_deletes: {{ DBT_PROFILE_USE_LW_DELETES }}
check_exchange: {{ DBT_PROFILE_CHECK_EXCHANGE }}
custom_settings: {{ DBT_PROFILE_CUSTOM_SETTINGS }}
sync_request_timeout: {{ DBT_PROFILE_SYNC_REQUEST_TIMEOUT }}
compress_block_size: {{ DBT_PROFILE_COMPRESS_BLOCK_SIZE }}
verify: True
secure: {{ CLICKHOUSE_SECURE_CONNECTION }}
retries: 3
compression: lz4
connect_timeout: 10
send_receive_timeout: 300
cluster_mode: false
use_lw_deletes: false
check_exchange: false
sync_request_timeout: 5
compress_block_size: 1048576
{{ patch("dbt-profiles") | indent(6)}}
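The per-connection DBT_PROFILE_* knobs are gone and their values are now hardcoded above, but the patch("dbt-profiles") hook at the end of the block is retained, so another Tutor plugin can still append or override profile keys. A short sketch of what such a patch could contain, assuming the dbt ClickHouse adapter accepts these keys; the values are illustrative only:

# Hypothetical content of a "dbt-profiles" patch provided by another plugin;
# the template indents it into the connection block above.
send_receive_timeout: 600
custom_settings:
  date_time_input_format: best_effort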

This file was deleted.

@@ -12,7 +12,7 @@
def upgrade():
op.execute(
f"""
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_OVERVIEWS_TABLE }}
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.course_overviews
{on_cluster}
(
org String NOT NULL,
@@ -35,7 +35,7 @@ def upgrade():
)
op.execute(
f"""
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_NODES_TABLE }}
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.course_blocks
{on_cluster}
(
org String NOT NULL,
@@ -54,7 +54,7 @@ def upgrade():
)
op.execute(
f"""
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_RELATIONSHIPS_TABLE }}
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.course_relationships
{on_cluster}
(
course_key String NOT NULL,
@@ -72,14 +72,14 @@

def downgrade():
op.execute(
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_RELATIONSHIPS_TABLE }}"
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.course_relationships"
f"{on_cluster}"
)
op.execute(
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_NODES_TABLE }}"
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.course_blocks"
f"{on_cluster}"
)
op.execute(
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_OVERVIEWS_TABLE }}"
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.course_overviews"
f"{on_cluster}"
)
@@ -21,7 +21,7 @@
def upgrade():
op.execute(
f"""
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_USER_PROFILE_TABLE }}
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.user_profile
{on_cluster}
(
id Int32 NOT NULL,
@@ -53,6 +53,6 @@ def upgrade():

def downgrade():
op.execute(
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_USER_PROFILE_TABLE }}"
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.user_profile"
f"{on_cluster}"
)
@@ -19,14 +19,14 @@
def drop_objects():
op.execute(
f"""
DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_RECENT_BLOCKS_TABLE }}
DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.most_recent_course_blocks
{on_cluster}
"""
)

op.execute(
f"""
DROP VIEW IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_RECENT_BLOCKS_MV }}
DROP VIEW IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.most_recent_course_blocks_mv
{on_cluster}
"""
)
@@ -53,7 +53,7 @@ def upgrade():

op.execute(
f"""
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_RECENT_BLOCKS_TABLE }}
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.most_recent_course_blocks
{on_cluster}
(
location String NOT NULL,
@@ -73,9 +73,9 @@ def upgrade():

op.execute(
f"""
create materialized view if not exists {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_RECENT_BLOCKS_MV }}
create materialized view if not exists {{ ASPECTS_EVENT_SINK_DATABASE }}.most_recent_course_blocks_mv
{on_cluster}
to {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_RECENT_BLOCKS_TABLE }} as
to {{ ASPECTS_EVENT_SINK_DATABASE }}.most_recent_course_blocks as
select
location,
display_name,
@@ -87,13 +87,13 @@ def upgrade():
course_key,
dump_id,
time_last_dumped
from {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_NODES_TABLE }}
from {{ ASPECTS_EVENT_SINK_DATABASE }}.course_blocks
"""
)

op.execute(
"""
insert into {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_RECENT_BLOCKS_TABLE }} (
insert into {{ ASPECTS_EVENT_SINK_DATABASE }}.most_recent_course_blocks (
location, display_name, display_name_with_location, section, subsection, unit, graded, course_key, dump_id, time_last_dumped
)
select
@@ -107,7 +107,7 @@ def upgrade():
course_key,
dump_id,
time_last_dumped
from {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_NODES_TABLE }};
from {{ ASPECTS_EVENT_SINK_DATABASE }}.course_blocks
"""
)

@@ -134,7 +134,7 @@ def upgrade():
course_key,
graded,
display_name_with_location
from {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_RECENT_BLOCKS_TABLE }}
from {{ ASPECTS_EVENT_SINK_DATABASE }}.most_recent_course_blocks
final
"
))
@@ -178,15 +178,15 @@ def downgrade():
db '{{ ASPECTS_EVENT_SINK_DATABASE }}'
query "with most_recent_blocks as (
select org, course_key, location, max(edited_on) as last_modified
from {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_NODES_TABLE }}
from {{ ASPECTS_EVENT_SINK_DATABASE }}.course_blocks
group by org, course_key, location
)
select
location,
display_name,
course_key,
JSONExtractBool(xblock_data_json, 'graded') as graded
from {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_NODES_TABLE }} co
from {{ ASPECTS_EVENT_SINK_DATABASE }}.course_blocks co
inner join most_recent_blocks mrb on
co.org = mrb.org and
co.course_key = mrb.course_key and
@@ -21,7 +21,7 @@
def upgrade():
op.execute(
f"""
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_EXTERNAL_ID_TABLE }}
CREATE TABLE IF NOT EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.external_id
{on_cluster}
(
`external_user_id` UUID NOT NULL,
@@ -41,6 +41,6 @@

def downgrade():
op.execute(
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.{{ ASPECTS_EVENT_SINK_EXTERNAL_ID_TABLE }}"
"DROP TABLE IF EXISTS {{ ASPECTS_EVENT_SINK_DATABASE }}.external_id"
f"{on_cluster}"
)