import base64
import logging

import requests
import yaml

logger = logging.getLogger(__name__)

# GitHub REST API endpoint templates, keyed by resource kind.
URLS = {"contents": "/repos/{}/contents/{}", "repos": "/repos/{}"}


class GithubInterface:
    """
    Interface to communicate with the GitHub API for bi-directional sync.

    Handles reading and updating files in a GitHub repository. Network and
    HTTP failures are logged and surfaced as ``None`` / ``False`` return
    values rather than exceptions, so callers can treat sync as best-effort.
    """

    def __init__(self, GITHUB_REPOSITORY, GITHUB_BRANCH, GITHUB_AUTH_TOKEN):
        """
        Args:
            GITHUB_REPOSITORY: "owner/repo" slug of the target repository.
            GITHUB_BRANCH: Branch to read/write; falls back to "challenge"
                when empty or None.
            GITHUB_AUTH_TOKEN: Personal access token for authorization.
        """
        self.GITHUB_AUTH_TOKEN = GITHUB_AUTH_TOKEN
        self.GITHUB_REPOSITORY = GITHUB_REPOSITORY
        self.BRANCH = GITHUB_BRANCH or "challenge"
        self.COMMIT_PREFIX = "evalai_bot: Update {}"

    def get_request_headers(self):
        """Get headers for GitHub API requests."""
        return {"Authorization": "token {}".format(self.GITHUB_AUTH_TOKEN)}

    def make_request(self, url, method, params=None, data=None):
        """
        Make a request to the GitHub API.

        Args:
            url: API path (appended to the GitHub base URL).
            method: HTTP method name, e.g. "GET" or "PUT".
            params: Optional query parameters.
            data: Optional JSON body (only attached when non-empty).

        Returns:
            The decoded JSON response body, or None on any request error.
        """
        url = self.get_github_url(url)
        headers = self.get_request_headers()
        try:
            response = requests.request(
                method=method,
                url=url,
                headers=headers,
                params=params or {},
                # Only attach a JSON body when one was provided; always
                # sending ``json={}`` adds a spurious body (and JSON
                # Content-Type header) to GET requests.
                json=data if data else None,
            )
            response.raise_for_status()
        except requests.exceptions.RequestException as e:
            # warning (not info): a failed sync call is actionable.
            logger.warning(
                "EvalAI is not able to establish connection with github: %s",
                str(e),
            )
            return None
        return response.json()

    def get_github_url(self, url):
        """Build the full GitHub API URL for an API path."""
        return "https://api.github.com{}".format(url)

    def get_content_from_path(self, path):
        """
        Gets the file content information in JSON format from the repository.
        Ref: https://docs.github.com/en/rest/reference/repos#contents
        """
        url = URLS.get("contents").format(self.GITHUB_REPOSITORY, path)
        params = {"ref": self.BRANCH}
        return self.make_request(url, "GET", params)

    def get_data_from_path(self, path):
        """
        Gets the file data in string format from the repository.
        Decodes the base64 content from get_content_from_path; returns
        None when the file is missing or unreachable.
        """
        content_response = self.get_content_from_path(path)
        if content_response and content_response.get("content"):
            return base64.b64decode(content_response["content"]).decode(
                "utf-8", errors="ignore"
            )
        return None

    def update_content_from_path(self, path, content, changed_field=None):
        """
        Updates (or creates) the file content in the repository at ``path``.
        Ref: https://docs.github.com/en/rest/reference/repos#create-or-update-file-contents

        Args:
            path: Repository-relative file path.
            content: Base64-encoded file content.
            changed_field: Optional field name for the commit message.
        """
        url = URLS.get("contents").format(self.GITHUB_REPOSITORY, path)

        # Existing content is fetched for its SHA (required for updates).
        existing_content = self.get_content_from_path(path)

        if changed_field:
            commit_message = (
                f"evalai_bot: Update {path} - changed field: {changed_field}"
            )
        else:
            commit_message = self.COMMIT_PREFIX.format(path)

        data = {
            "message": commit_message,
            "branch": self.BRANCH,
            "content": content,
        }
        if existing_content and existing_content.get("sha"):
            # File exists: GitHub requires the current blob SHA to update.
            data["sha"] = existing_content.get("sha")

        return self.make_request(url, "PUT", data=data)

    def update_data_from_path(self, path, data, changed_field=None):
        """
        Updates the file at ``path`` with the provided string data.
        Encodes to base64 and delegates to update_content_from_path.
        """
        content = base64.b64encode(bytes(data, "utf-8")).decode("utf-8")
        return self.update_content_from_path(path, content, changed_field)

    def is_repository(self):
        """Check if the repository exists and is accessible."""
        url = URLS.get("repos").format(self.GITHUB_REPOSITORY)
        return bool(self.make_request(url, "GET"))

    def _read_text_from_file_field(self, value):
        """Best-effort read of text from a Django FileField-like value.

        Returns the decoded text, or None when the value cannot be read.
        """
        if value is None:
            return None
        try:
            # Django FieldFile exposes open/read/close.
            if hasattr(value, "open"):
                value.open("rb")
                data = value.read()
                value.close()
            elif hasattr(value, "read"):
                data = value.read()
            else:
                data = str(value)
            if isinstance(data, bytes):
                try:
                    return data.decode("utf-8")
                except Exception:
                    return data.decode("latin-1", errors="ignore")
            return str(data)
        except Exception:
            # Best-effort: unreadable file fields simply yield None.
            return None

    def _process_field_value(self, field, value):
        """
        Process a model field value into a YAML-serializable form.

        Returns the processed value, or None if processing failed or the
        value itself was None.
        """
        if value is None:
            return None

        try:
            if field in ["start_date", "end_date"] and hasattr(
                value, "strftime"
            ):
                # Dates are stored in the config as formatted strings.
                return value.strftime("%Y-%m-%d %H:%M:%S")
            elif (
                field
                in [
                    "description",
                    "evaluation_details",
                    "terms_and_conditions",
                    "submission_guidelines",
                ]
                and value
            ):
                # Extract the actual content from HTML/file-backed fields.
                if hasattr(value, "read"):
                    try:
                        value.seek(0)
                        return value.read().decode("utf-8")
                    except Exception:
                        return str(value)
                return str(value)
            elif field in ["image", "evaluation_script"] and value:
                # For YAML, store the filename/path if available.
                return value.name if hasattr(value, "name") else str(value)
            elif isinstance(value, (list, tuple)):
                # Related objects are reduced to their primary keys.
                clean_list = []
                for item in value:
                    if hasattr(item, "pk"):
                        clean_list.append(item.pk)
                    elif hasattr(item, "id"):
                        clean_list.append(item.id)
                    else:
                        clean_list.append(item)
                return clean_list
            else:
                if hasattr(value, "pk"):
                    return value.pk
                elif hasattr(value, "id"):
                    return value.id
                return value
        except Exception as e:
            logger.error("Error processing field %s: %s", field, str(e))
            return None

    def _load_config_data(self):
        """Load challenge_config.yaml as a dict; empty dict on any problem."""
        existing_config = self.get_data_from_path("challenge_config.yaml")
        if not existing_config:
            return {}
        try:
            config_data = yaml.safe_load(existing_config)
        except yaml.YAMLError:
            logger.warning(
                "Existing challenge_config.yaml is not valid YAML, starting fresh"
            )
            return {}
        return config_data if isinstance(config_data, dict) else {}

    def _sync_file_field(self, file_path, instance, changed_field):
        """
        Sync a file-backed model field to the repo file at ``file_path``.

        Returns True when the file is already up to date (or unreadable,
        treated as a no-op), else the boolean result of the update call.
        """
        current_text = self.get_data_from_path(file_path)
        new_text = self._read_text_from_file_field(
            getattr(instance, changed_field, None)
        )
        if new_text is None or new_text == current_text:
            return True
        return bool(
            self.update_data_from_path(file_path, new_text, changed_field)
        )

    def _dump_config_yaml(self, config_data):
        """Serialize the config dict back to YAML, preserving key order."""
        return yaml.dump(
            config_data,
            default_flow_style=False,
            allow_unicode=True,
            sort_keys=False,
        )

    def update_challenge_config(self, challenge, changed_field):
        """
        Update challenge configuration in the GitHub repository.
        Only updates the specific field that changed.

        Returns:
            True on success or when nothing needed updating, else False.
        """
        try:
            config_data = self._load_config_data()

            # File fields: update the referenced file's content instead of
            # the YAML itself.
            if changed_field in {"evaluation_script"}:
                file_path = config_data.get(changed_field)
                if not file_path:
                    logger.warning(
                        f"No path for '{changed_field}' in challenge_config.yaml; "
                        "skipping file update"
                    )
                    return False
                return self._sync_file_field(
                    file_path, challenge, changed_field
                )

            # Non-file field: update the YAML key with the processed value.
            if not hasattr(challenge, changed_field):
                logger.error(
                    f"Field {changed_field} not found on challenge model"
                )
                return False

            processed_value = self._process_field_value(
                changed_field, getattr(challenge, changed_field)
            )
            if processed_value is None:
                logger.warning(
                    f"Could not process changed field: {changed_field}"
                )
                return False

            # Skip if value unchanged to avoid an empty commit.
            if config_data.get(changed_field) == processed_value:
                return True
            config_data[changed_field] = processed_value

            # Add documentation header above the regenerated YAML.
            header_comment = (
                "# If you are not sure what all these fields mean, "
                "please refer our documentation here:\n"
                "# https://evalai.readthedocs.io/en/latest/configuration.html\n"
            )
            yaml_content = header_comment + self._dump_config_yaml(config_data)

            return bool(
                self.update_data_from_path(
                    "challenge_config.yaml", yaml_content, changed_field
                )
            )
        except Exception as e:
            logger.error("Error updating challenge config: %s", str(e))
            return False

    def update_challenge_phase_config(self, challenge_phase, changed_field):
        """
        Update challenge phase configuration in the GitHub repository.
        Only updates the specific field that changed.

        Returns:
            True on success or when nothing needed updating, else False.
        """
        try:
            config_data = self._load_config_data()
            phases = config_data.setdefault("challenge_phases", [])

            # Locate the target phase by codename.
            codename = getattr(challenge_phase, "codename", None)
            target_index = None
            for i, phase in enumerate(phases):
                if phase.get("codename") == codename:
                    target_index = i
                    break
            if target_index is None:
                logger.error("Phase with codename %s not found", codename)
                return False

            # Some model fields map to differently named YAML keys.
            yaml_key_map = {"test_annotation": "test_annotation_file"}
            yaml_key = yaml_key_map.get(changed_field, changed_field)

            # File field for the phase: update the referenced file content.
            if changed_field in {"test_annotation"}:
                file_path = phases[target_index].get(yaml_key)
                if not file_path:
                    logger.warning(
                        f"No path for '{yaml_key}' in challenge_config.yaml; "
                        "skipping file update"
                    )
                    return False
                return self._sync_file_field(
                    file_path, challenge_phase, changed_field
                )

            # Non-file field: update the YAML entry for that phase.
            if not hasattr(challenge_phase, changed_field):
                logger.error(
                    f"Field {changed_field} not found on challenge_phase model"
                )
                return False

            processed_value = self._process_field_value(
                changed_field, getattr(challenge_phase, changed_field)
            )
            if processed_value is None:
                logger.warning(
                    f"Could not process changed phase field: {changed_field}"
                )
                return False

            # Skip if unchanged to avoid an empty commit.
            if phases[target_index].get(yaml_key) == processed_value:
                return True
            phases[target_index][yaml_key] = processed_value

            yaml_content = self._dump_config_yaml(config_data)
            return bool(
                self.update_data_from_path(
                    "challenge_config.yaml", yaml_content, changed_field
                )
            )
        except Exception as e:
            logger.error("Error updating challenge phase config: %s", str(e))
            return False
+""" + +# Challenge fields that should be synced to GitHub +CHALLENGE_SYNC_FIELDS = { + "title", + "short_description", + "description", + "terms_and_conditions", + "submission_guidelines", + "evaluation_details", + "start_date", + "end_date", + "evaluation_script", +} + +# ChallengePhase fields that should be synced to GitHub +CHALLENGE_PHASE_SYNC_FIELDS = { + "name", + "description", + "start_date", + "end_date", + "max_submissions_per_day", + "max_submissions_per_month", + "max_submissions", + "is_public", + "is_submission_public", + "test_annotation", +} + +# Fields that are file-based (content stored in separate files) +CHALLENGE_FILE_FIELDS = {"evaluation_script"} + +CHALLENGE_PHASE_FILE_FIELDS = {"test_annotation"} diff --git a/apps/challenges/github_utils.py b/apps/challenges/github_utils.py new file mode 100644 index 0000000000..d10031c6c8 --- /dev/null +++ b/apps/challenges/github_utils.py @@ -0,0 +1,374 @@ +""" +Celery tasks and utilities for bi-directional GitHub sync. +Syncs Challenge and ChallengePhase changes from EvalAI UI to GitHub repositories. 
+""" + +import logging +import threading + +from celery import shared_task +from django.core import serializers + +from .github_interface import GithubInterface +from .github_sync_config import ( + CHALLENGE_PHASE_SYNC_FIELDS, + CHALLENGE_SYNC_FIELDS, +) + +logger = logging.getLogger(__name__) + +# Thread-local storage for per-request sync context +_sync_context = threading.local() + + +def get_sync_context(): + """Get the current request's sync context.""" + if not hasattr(_sync_context, "synced_models"): + _sync_context.synced_models = set() + if not hasattr(_sync_context, "payload_keys"): + _sync_context.payload_keys = set() + if not hasattr(_sync_context, "is_github_source"): + _sync_context.is_github_source = False + return _sync_context + + +def reset_sync_context(): + """Reset sync context for a new request.""" + _sync_context.synced_models = set() + _sync_context.payload_keys = set() + _sync_context.is_github_source = False + + +def mark_synced(model_name, pk): + """Mark a model instance as synced in this request.""" + ctx = get_sync_context() + ctx.synced_models.add((model_name, pk)) + + +def is_synced(model_name, pk): + """Check if a model instance was already synced in this request.""" + ctx = get_sync_context() + return (model_name, pk) in ctx.synced_models + + +def set_payload_keys(keys): + """Set the request payload keys for field inference.""" + ctx = get_sync_context() + ctx.payload_keys = set(keys) if keys else set() + + +def get_payload_keys(): + """Get the request payload keys.""" + ctx = get_sync_context() + return ctx.payload_keys + + +def set_github_source(is_github): + """Mark request as originating from GitHub.""" + ctx = get_sync_context() + ctx.is_github_source = is_github + + +def is_github_source(): + """Check if request originated from GitHub.""" + ctx = get_sync_context() + return ctx.is_github_source + + +def deserialize_object(serialized_object): + """Deserialize a Django object from JSON.""" + deserialized_object = None + for obj in 
serializers.deserialize("json", serialized_object): + deserialized_object = obj.object + return deserialized_object + + +def get_changed_field_from_update_fields(update_fields, sync_fields): + """ + Determine which syncable field was changed based on update_fields. + Returns the field name if exactly one syncable field was updated. + """ + if not update_fields: + return None + + changed_sync_fields = set(update_fields) & sync_fields + if len(changed_sync_fields) == 1: + return changed_sync_fields.pop() + return None + + +def get_changed_field_from_payload(payload_keys, sync_fields): + """ + Determine which syncable field was changed based on request payload. + Returns the field name if exactly one syncable field was in the payload. + """ + if not payload_keys: + return None + + changed_sync_fields = set(payload_keys) & sync_fields + if len(changed_sync_fields) == 1: + return changed_sync_fields.pop() + return None + + +@shared_task +def github_challenge_sync(serialized_challenge, changed_field): + """ + Celery task to sync a Challenge change to GitHub. 
+ + Args: + serialized_challenge: JSON serialized Challenge object + changed_field: The field that was changed + """ + try: + challenge = deserialize_object(serialized_challenge) + if not challenge: + logger.error("Failed to deserialize challenge for GitHub sync") + return False + + # Verify GitHub config is present + if not challenge.github_repository or not challenge.github_token: + logger.debug( + f"Challenge {challenge.pk} missing github_repository or github_token, " + "skipping sync" + ) + return False + + # Create GitHub interface + github = GithubInterface( + challenge.github_repository, + challenge.github_branch, + challenge.github_token, + ) + + # Verify repository access + if not github.is_repository(): + logger.error( + f"Cannot access GitHub repository: {challenge.github_repository}" + ) + return False + + # Sync the changed field + success = github.update_challenge_config(challenge, changed_field) + if success: + logger.info( + f"Successfully synced {changed_field} for challenge {challenge.pk}" + ) + else: + logger.error( + f"Failed to sync {changed_field} for challenge {challenge.pk}" + ) + return success + + except Exception as e: + logger.error(f"Error in github_challenge_sync: {str(e)}") + return False + + +@shared_task +def github_challenge_phase_sync( + serialized_challenge_phase, serialized_challenge, changed_field +): + """ + Celery task to sync a ChallengePhase change to GitHub. 
+ + Args: + serialized_challenge_phase: JSON serialized ChallengePhase object + serialized_challenge: JSON serialized Challenge object (parent) + changed_field: The field that was changed + """ + try: + challenge_phase = deserialize_object(serialized_challenge_phase) + challenge = deserialize_object(serialized_challenge) + + if not challenge_phase or not challenge: + logger.error( + "Failed to deserialize challenge_phase or challenge for GitHub sync" + ) + return False + + # Verify GitHub config is present on parent challenge + if not challenge.github_repository or not challenge.github_token: + logger.debug( + f"Challenge {challenge.pk} missing github_repository or github_token, " + "skipping phase sync" + ) + return False + + # Create GitHub interface + github = GithubInterface( + challenge.github_repository, + challenge.github_branch, + challenge.github_token, + ) + + # Verify repository access + if not github.is_repository(): + logger.error( + f"Cannot access GitHub repository: {challenge.github_repository}" + ) + return False + + # Sync the changed field + success = github.update_challenge_phase_config( + challenge_phase, changed_field + ) + if success: + logger.info( + f"Successfully synced {changed_field} for challenge phase " + f"{challenge_phase.pk}" + ) + else: + logger.error( + f"Failed to sync {changed_field} for challenge phase " + f"{challenge_phase.pk}" + ) + return success + + except Exception as e: + logger.error(f"Error in github_challenge_phase_sync: {str(e)}") + return False + + +def trigger_challenge_sync(challenge, update_fields=None): + """ + Trigger GitHub sync for a Challenge if conditions are met. 
+ + Args: + challenge: Challenge model instance + update_fields: List of fields that were updated (from model.save()) + """ + # Skip if not configured for GitHub sync + if not challenge.github_repository or not challenge.github_token: + return + + # Skip if already synced in this request (prevent loops) + if is_synced("Challenge", challenge.pk): + logger.debug( + f"Challenge {challenge.pk} already synced in this request" + ) + return + + # Skip if request came from GitHub (prevent loops) + if is_github_source(): + logger.debug("Skipping sync - request originated from GitHub") + return + + # Determine which field changed + changed_field = get_changed_field_from_update_fields( + update_fields, CHALLENGE_SYNC_FIELDS + ) + + # If not from update_fields, try payload keys + if not changed_field: + changed_field = get_changed_field_from_payload( + get_payload_keys(), CHALLENGE_SYNC_FIELDS + ) + + # Fallback: compare original field values if available + if not changed_field: + for field in CHALLENGE_SYNC_FIELDS: + original_attr = f"_original_{field}" + if hasattr(challenge, original_attr): + original_value = getattr(challenge, original_attr) + current_value = getattr(challenge, field) + if original_value != current_value: + changed_field = field + logger.debug( + f"Detected changed field via original value comparison: " + f"{field} for challenge {challenge.pk}" + ) + break + + if not changed_field: + logger.debug( + f"Could not determine changed field for challenge {challenge.pk}" + ) + return + + # Mark as synced to prevent re-entry + mark_synced("Challenge", challenge.pk) + + # Serialize and queue the sync task + serialized_challenge = serializers.serialize("json", [challenge]) + github_challenge_sync.delay(serialized_challenge, changed_field) + logger.info( + f"Queued GitHub sync for challenge {challenge.pk}, field: {changed_field}" + ) + + +def trigger_challenge_phase_sync(challenge_phase, update_fields=None): + """ + Trigger GitHub sync for a ChallengePhase if 
conditions are met. + + Args: + challenge_phase: ChallengePhase model instance + update_fields: List of fields that were updated (from model.save()) + """ + # Get parent challenge + challenge = challenge_phase.challenge + + # Skip if not configured for GitHub sync + if not challenge.github_repository or not challenge.github_token: + return + + # Skip if already synced in this request (prevent loops) + if is_synced("ChallengePhase", challenge_phase.pk): + logger.debug( + f"ChallengePhase {challenge_phase.pk} already synced in this request" + ) + return + + # Skip if request came from GitHub (prevent loops) + if is_github_source(): + logger.debug("Skipping phase sync - request originated from GitHub") + return + + # Determine which field changed + changed_field = get_changed_field_from_update_fields( + update_fields, CHALLENGE_PHASE_SYNC_FIELDS + ) + + # If not from update_fields, try payload keys + if not changed_field: + changed_field = get_changed_field_from_payload( + get_payload_keys(), CHALLENGE_PHASE_SYNC_FIELDS + ) + + # Fallback: compare original field values if available + if not changed_field: + for field in CHALLENGE_PHASE_SYNC_FIELDS: + original_attr = f"_original_{field}" + if hasattr(challenge_phase, original_attr): + original_value = getattr(challenge_phase, original_attr) + current_value = getattr(challenge_phase, field) + if original_value != current_value: + changed_field = field + logger.debug( + f"Detected changed field via original value comparison: " + f"{field} for challenge phase {challenge_phase.pk}" + ) + break + + if not changed_field: + logger.debug( + f"Could not determine changed field for challenge phase " + f"{challenge_phase.pk}" + ) + return + + # Mark as synced to prevent re-entry + mark_synced("ChallengePhase", challenge_phase.pk) + + # Serialize and queue the sync task + serialized_challenge_phase = serializers.serialize( + "json", [challenge_phase] + ) + serialized_challenge = serializers.serialize("json", [challenge]) + 
# Generated by Django 2.2.20 on 2025-07-09 16:00

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the ``github_token`` column used for bi-directional GitHub sync."""

    dependencies = [
        ("challenges", "0113_add_github_branch_field_and_unique_constraint"),
    ]

    operations = [
        migrations.AddField(
            model_name="challenge",
            name="github_token",
            field=models.CharField(
                max_length=500,
                null=True,
                blank=True,
                default="",
            ),
        ),
    ]


@receiver(signals.post_save, sender="challenges.Challenge")
def challenge_details_sync(sender, instance, created, **kwargs):
    """
    Sync Challenge changes to the linked GitHub repository.

    Runs on post_save for Challenge. Newly created challenges are ignored
    (only updates are synced), as are challenges without a GitHub
    repository and token configured.
    """
    if created:
        # Only updates are synced; creation is handled elsewhere.
        return

    if not (instance.github_repository and instance.github_token):
        # No GitHub sync configuration on this challenge.
        return

    # Imported lazily to avoid a circular import at module load time.
    from challenges.github_utils import trigger_challenge_sync

    trigger_challenge_sync(instance, kwargs.get("update_fields"))


@receiver(signals.post_save, sender="challenges.ChallengePhase")
def challenge_phase_details_sync(sender, instance, created, **kwargs):
    """
    Sync ChallengePhase changes to the parent challenge's GitHub repository.

    Runs on post_save for ChallengePhase. Newly created phases are ignored,
    as are phases whose parent challenge lacks GitHub sync configuration.
    """
    if created:
        # Only updates are synced; creation is handled elsewhere.
        return

    parent = instance.challenge
    if not (parent.github_repository and parent.github_token):
        # No GitHub sync configuration on the parent challenge.
        return

    # Imported lazily to avoid a circular import at module load time.
    from challenges.github_utils import trigger_challenge_phase_sync

    trigger_challenge_phase_sync(instance, kwargs.get("update_fields"))
get_domain_name(self, obj): return obj.get_domain_display() @@ -96,6 +99,7 @@ class Meta: "sqs_retention_period", "github_repository", "github_branch", + "github_token", ) @@ -243,6 +247,10 @@ class ZipChallengeSerializer(ChallengeSerializer): Serializer used for creating challenge through zip file. """ + github_token = serializers.CharField( + max_length=500, write_only=True, required=False, allow_blank=True + ) + def __init__(self, *args, **kwargs): super(ZipChallengeSerializer, self).__init__(*args, **kwargs) @@ -299,6 +307,7 @@ class Meta: "cli_version", "github_repository", "github_branch", + "github_token", "vpc_cidr", "subnet_1_cidr", "subnet_2_cidr",