From de58737660ad3b9dc438b903ec91a7874b22aa5e Mon Sep 17 00:00:00 2001
From: Matthew Li
Date: Fri, 15 Nov 2024 10:53:21 -0800
Subject: [PATCH] Allow all users to export jobs to CSV (#651)

* Add class that defines user access to jobs
* Add class for storing job search filters in session storage
* Refactor job views to use new utilities
* Unhide export button for all users
* Update tests
---
 .../core/statistics/templates/job_list.html  |   2 -
 .../core/statistics/tests/test_job_views.py  | 260 +++++++++++++++---
 coldfront/core/statistics/utils_/__init__.py |   0
 .../utils_/job_accessibility_manager.py      |  61 ++++
 .../statistics/utils_/job_query_filtering.py |  45 +++
 coldfront/core/statistics/views.py           | 171 ++++++------
 6 files changed, 409 insertions(+), 130 deletions(-)
 create mode 100644 coldfront/core/statistics/utils_/__init__.py
 create mode 100644 coldfront/core/statistics/utils_/job_accessibility_manager.py

diff --git a/coldfront/core/statistics/templates/job_list.html b/coldfront/core/statistics/templates/job_list.html
index d73b4c5d9..ccd355783 100644
--- a/coldfront/core/statistics/templates/job_list.html
+++ b/coldfront/core/statistics/templates/job_list.html
@@ -114,7 +114,6 @@
       Job List
     {% endif %}
-    {% if user.is_superuser %}
       ... ("Export Job List to CSV" button markup, unchanged; stripped during extraction) ...
-    {% endif %}
     {% if job_list %}
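Note: the template change above only unhides the export button; which jobs a given user can actually export is enforced server-side by the JobAccessibilityManager added later in this patch. As a rough illustration of those access rules only (a runnable sketch over plain dataclasses, not code from the patch; FakeUser and FakeJob are hypothetical stand-ins for the Django models):

    # Sketch of the access rules that JobAccessibilityManager (below) encodes.
    from dataclasses import dataclass, field

    @dataclass
    class FakeUser:
        username: str
        is_superuser: bool = False
        has_view_job_perm: bool = False        # stands in for 'statistics.view_job'
        managed_projects: set = field(default_factory=set)  # active PI/Manager roles

    @dataclass
    class FakeJob:
        jobslurmid: str
        username: str                          # submitting user
        project: str                           # the job's accountid (project)

    def can_access(user: FakeUser, job: FakeJob) -> bool:
        if user.is_superuser or user.has_view_job_perm:
            return True                        # global access
        if job.project in user.managed_projects:
            return True                        # PI/Manager of the job's project
        return job.username == user.username   # otherwise, own jobs only

    pi = FakeUser('pi', managed_projects={'project1'})
    assert can_access(pi, FakeJob('123', 'user1', 'project1'))
    assert not can_access(pi, FakeJob('456', 'user2', 'project2'))

Global access corresponds to is_superuser or the 'statistics.view_job' permission; project-level access corresponds to an active 'Manager' or 'Principal Investigator' role on the job's project.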
diff --git a/coldfront/core/statistics/tests/test_job_views.py b/coldfront/core/statistics/tests/test_job_views.py
index 132b8f3e6..86b35fa8f 100644
--- a/coldfront/core/statistics/tests/test_job_views.py
+++ b/coldfront/core/statistics/tests/test_job_views.py
@@ -1,7 +1,11 @@
 import copy

-from django.urls import reverse
 from http import HTTPStatus
+from urllib.parse import urlencode
+
+from django.contrib.auth.models import User
+from django.urls import reverse
+from django.utils import timezone

 from coldfront.core.project.models import *
 from coldfront.core.user.models import UserProfile
@@ -9,9 +13,6 @@
 from coldfront.core.utils.tests.test_base import TestBase
 from coldfront.core.statistics.models import Job

-from django.contrib.auth.models import User
-from django.utils import timezone
-

 class TestJobBase(TestBase):
     """A base class for testing job views."""
@@ -69,6 +70,9 @@ def setUp(self):
         self.project2 = Project.objects.create(
             name='project2', status=active_project_status)

+        self.project3 = Project.objects.create(
+            name='project3', status=active_project_status)
+
         self.project1_user1 = ProjectUser.objects.create(
             user=self.user1,
             project=self.project1,
@@ -116,6 +120,22 @@ def setUp(self):
         self.staff.is_staff = True
         self.staff.save()

+        ProjectUser.objects.create(
+            user=self.admin,
+            project=self.project3,
+            role=user_project_role,
+            status=self.active_project_user_status)
+        ProjectUser.objects.create(
+            user=self.staff,
+            project=self.project3,
+            role=user_project_role,
+            status=self.active_project_user_status)
+        ProjectUser.objects.create(
+            user=self.manager,
+            project=self.project3,
+            role=user_project_role,
+            status=self.active_project_user_status)
+
         self.password = 'password'

         for user in User.objects.all():
@@ -242,6 +262,18 @@ def test_admin_contents(user):
         test_admin_contents(self.admin)
         test_admin_contents(self.staff)

+    def test_export_button_visible(self):
+        """Testing if 'Export Job List to CSV' button is visible."""
+        url = reverse('slurm-job-list')
+        response = self.get_response(self.user1, url)
+        self.assertNotContains(response, 'Export Job List to CSV')
+
+        response = self.get_response(self.staff, url)
+        self.assertNotContains(response, 'Export Job List to CSV')
+
+        response = self.get_response(self.admin, url)
+        self.assertContains(response, 'Export Job List to CSV')
+
     def test_pagination(self):
         """Testing pagination of list view"""

@@ -259,6 +291,16 @@ def test_pagination(self):
         self.assertContains(response, 'Next')
         self.assertContains(response, 'Previous')

+    def test_saves_filters_in_session(self):
+        """Testing if cleaned form data is saved in session"""
+        url = reverse('slurm-job-list') + '?show_all_jobs=on&username=user1&partition=test_partition'
+        self.get_response(self.admin, url)
+
+        self.assertEqual(self.client.session.get('job_search_filters')['username'],
+                         self.user1.username)
+        self.assertEqual(self.client.session.get('job_search_filters')['partition'],
+                         'test_partition')
+
     def test_status_colors(self):
         """Test different status colors"""

@@ -378,44 +420,63 @@ def test_admin_access(self):
         self.assert_has_access(self.staff, True, url)


-class ExportJobListView(TestJobBase):
-    """A class for testing SlurmJobDetailView"""
+class TestExportJobListView(TestJobBase):
+    """A class for testing ExportJobListView."""

     def setUp(self):
         """Set up test data."""
         super().setUp()

-    def test_access(self):
-        url = reverse('export-job-list')
+        self.admin_job = Job.objects.create(
+            jobslurmid='11111',
+            submitdate=self.current_time - datetime.timedelta(days=3),
+            startdate=self.current_time - datetime.timedelta(days=2),
+            enddate=self.current_time - datetime.timedelta(days=1),
+            userid=self.admin,
+            accountid=self.project3,
+            jobstatus='COMPLETING',
+            partition='test_partition1')
+        self.staff_job = Job.objects.create(
+            jobslurmid='22222',
+            submitdate=self.current_time - datetime.timedelta(days=3),
+            startdate=self.current_time - datetime.timedelta(days=2),
+            enddate=self.current_time - datetime.timedelta(days=1),
+            userid=self.staff,
+            accountid=self.project3,
+            jobstatus='COMPLETING',
+            partition='test_partition2')
+        self.manager_job_on_project3 = Job.objects.create(
+            jobslurmid='33333',
+            submitdate=self.current_time - datetime.timedelta(days=3),
+            startdate=self.current_time - datetime.timedelta(days=2),
+            enddate=self.current_time - datetime.timedelta(days=1),
+            userid=self.manager,
+            accountid=self.project3,
+            jobstatus='COMPLETING',
+            partition='test_partition2')
+
+    def _assert_csv_job_ids(self, user, query_params, expected_jobs):
+        """Assert that the CSV generated by the given User and query
+        parameters has exactly the given jobs, based on ID."""
+        self.client.login(username=user.username, password=self.password)

-        self.assert_has_access(self.user1, False, url)
-        self.assert_has_access(self.user2, False, url)
-        self.assert_has_access(self.admin, True, url)
-        self.assert_has_access(self.staff, False, url)
+        list_url = f'{reverse("slurm-job-list")}?{urlencode(query_params)}'
+        self.get_response(user, list_url)

-    def test_job_list_view(self):
-        """Testing if 'Export Job List to CSV' button appears"""
-        url = reverse('slurm-job-list')
-        response = self.get_response(self.user1, url)
-        self.assertNotContains(response, 'Export Job List to CSV')
+        export_url = reverse('export-job-list')
+        response = self.client.get(export_url)
+        csv_lines = copy.deepcopy(list(response.streaming_content))

-        response = self.get_response(self.staff, url)
-        self.assertNotContains(response, 'Export Job List to CSV')
+        actual_job_slurm_ids = set()
+        for line in csv_lines[1:]:
+            line = line.decode('utf-8')
+            actual_job_slurm_ids.add(line[:line.find(',')])

-        response = self.get_response(self.admin, url)
-        self.assertContains(response, 'Export Job List to CSV')
+        expected_job_slurm_ids = {job.jobslurmid for job in expected_jobs}

-    def test_job_list_saves_session(self):
-        """Testing if cleaned form data is saved in session"""
-        url = reverse('slurm-job-list') + '?show_all_jobs=on&username=user1&partition=testpartition'
-        self.get_response(self.admin, url)
+        self.assertEqual(actual_job_slurm_ids, expected_job_slurm_ids)

-        self.assertEqual(self.client.session.get('job_search_form_data')['username'],
-                         self.user1.username)
-        self.assertEqual(self.client.session.get('job_search_form_data')['partition'],
-                         'testpartition')
-
-    def create_job_csv_line(self, job):
+    def _create_job_csv_line(self, job):
         job_line = f'{job.jobslurmid},{job.userid.username},' \
                    f'{job.accountid.name},{job.partition},' \
                    f'{job.jobstatus},{job.submitdate},' \
                    f'{job.startdate},{job.enddate},{job.amount}'

         return job_line

@@ -424,6 +485,123 @@ def create_job_csv_line(self, job):
+    def test_view_access(self):
+        """Test that all users have access to the view."""
+        url = reverse('export-job-list')
+
+        self.assert_has_access(self.user1, True, url)
+        self.assert_has_access(self.user2, True, url)
+        self.assert_has_access(self.admin, True, url)
+        self.assert_has_access(self.staff, True, url)
+
+    def test_admin_job_accessibility(self):
+        """Test that admins can export all jobs across all users, as
+        well as their own jobs."""
+        # Superuser
+        query_params = {
+            'show_all_jobs': 'on',
+        }
+        expected_jobs = (
+            self.job1, self.job2, self.admin_job, self.staff_job,
+            self.manager_job_on_project3)
+        self._assert_csv_job_ids(self.admin, query_params, expected_jobs)
+
+        query_params = {}
+        expected_jobs = {self.admin_job}
+        self._assert_csv_job_ids(self.admin, query_params, expected_jobs)
+
+        # Staff
+        query_params = {
+            'show_all_jobs': 'on',
+        }
+        expected_jobs = (
+            self.job1, self.job2, self.admin_job, self.staff_job,
+            self.manager_job_on_project3)
+        self._assert_csv_job_ids(self.staff, query_params, expected_jobs)
+
+        query_params = {}
+        expected_jobs = {self.staff_job}
+        self._assert_csv_job_ids(self.staff, query_params, expected_jobs)
+
+    def test_pi_manager_accessibility(self):
+        """Test that project PIs and managers can export all jobs under
+        projects that they manage, as well as their own jobs."""
+        # PI of project1
+        query_params = {}
+        expected_jobs = (self.job1,)
+        self._assert_csv_job_ids(self.pi, query_params, expected_jobs)
+
+        # Manager of project1 (and regular user of project3)
+        query_params = {}
+        expected_jobs = (self.job1, self.manager_job_on_project3)
+        self._assert_csv_job_ids(self.manager, query_params, expected_jobs)
+
+    def test_user_accessibility(self):
+        """Test that regular users can only export their own jobs."""
+        # user1
+        query_params = {}
+        expected_jobs = (self.job1, self.job2)
+        self._assert_csv_job_ids(self.user1, query_params, expected_jobs)
+
+        # user2
+        query_params = {}
+        expected_jobs = ()
+        self._assert_csv_job_ids(self.user2, query_params, expected_jobs)
+
+    def test_show_all_jobs_ineffective_for_non_admins(self):
+        """Test that non-superuser/staff users may not view all jobs by
+        specifying 'show_all_jobs'."""
+        # user2
+        query_params = {
+            'show_all_jobs': 'on',
+        }
+        expected_jobs = ()
+        self._assert_csv_job_ids(self.user2, query_params, expected_jobs)
+
+        # PI of project1
+        query_params = {
+            'show_all_jobs': 'on',
+        }
+        expected_jobs = (self.job1,)
+        self._assert_csv_job_ids(self.pi, query_params, expected_jobs)
+
+    def test_filters_saved_in_session(self):
+        """Test that filters saved by the list view are accessible to
+        and considered by this view."""
+        self.client.login(username=self.user1.username, password=self.password)
+        export_url = reverse('export-job-list')
+
+        # Without visiting the list view first, no filters are set. All the
+        # user's jobs are exported.
+        response = self.client.get(export_url)
+        csv_lines = copy.deepcopy(list(response.streaming_content))
+        self.assertIn(f'jobslurmid,username,project_name,partition,'
+                      f'jobstatus,submitdate,startdate,enddate',
+                      csv_lines[0].decode('utf-8'))
+        job1_line = self._create_job_csv_line(self.job1)
+        job2_line = self._create_job_csv_line(self.job2)
+        self.assertIn(job1_line, csv_lines[1].decode('utf-8'))
+        self.assertIn(job2_line, csv_lines[2].decode('utf-8'))
+        self.assertEqual(len(csv_lines), 3)
+
+        # The user sets some filters on the list view.
+        query_params = {
+            'username': 'user1',
+            'partition': 'test_partition1',
+        }
+        list_url = f'{reverse("slurm-job-list")}?{urlencode(query_params)}'
+        self.get_response(self.user1, list_url)
+
+        # Only filtered jobs should be exported.
+        response = self.client.get(export_url)
+        csv_lines = copy.deepcopy(list(response.streaming_content))
+        self.assertIn(f'jobslurmid,username,project_name,partition,'
+                      f'jobstatus,submitdate,startdate,enddate',
+                      csv_lines[0].decode('utf-8'))
+        job1_line = self._create_job_csv_line(self.job1)
+        self.assertIn(job1_line, csv_lines[1].decode('utf-8'))
+        self.assertEqual(len(csv_lines), 2)
+
     def test_exported_csv_no_filtering(self):
         """Testing if exported CSV file has correct data"""

@@ -431,7 +609,7 @@ def test_exported_csv_no_filtering(self):
         self.client.login(username=self.admin.username, password=self.password)

         session = self.client.session
-        session['job_search_form_data'] = {'status': '',
+        session['job_search_filters'] = {'status': '',
                                            'jobslurmid': '',
                                            'project_name': '',
                                            'username': '',
@@ -453,12 +631,16 @@ def test_exported_csv_no_filtering(self):
                       f'jobstatus,submitdate,startdate,enddate',
                       csv_lines[0].decode('utf-8'))

-        job1_line = self.create_job_csv_line(self.job1)
-        job2_line = self.create_job_csv_line(self.job2)
+        job1_line = self._create_job_csv_line(self.job1)
+        job2_line = self._create_job_csv_line(self.job2)
+        admin_job_line = self._create_job_csv_line(self.admin_job)
+        staff_job_line = self._create_job_csv_line(self.staff_job)

         self.assertIn(job1_line, csv_lines[1].decode('utf-8'))
         self.assertIn(job2_line, csv_lines[2].decode('utf-8'))
-        self.assertEqual(len(csv_lines), 3)
+        self.assertIn(admin_job_line, csv_lines[3].decode('utf-8'))
+        self.assertIn(staff_job_line, csv_lines[4].decode('utf-8'))
+        self.assertEqual(len(csv_lines), 6)

     def test_exported_csv_with_filtering(self):
         """Testing if exported CSV file has correct data"""

@@ -467,7 +649,7 @@ def test_exported_csv_with_filtering(self):
         self.client.login(username=self.admin.username, password=self.password)

         session = self.client.session
-        session['job_search_form_data'] = {'status': '',
+        session['job_search_filters'] = {'status': '',
                                            'jobslurmid': '',
                                            'project_name': '',
                                            'username': '',
@@ -489,7 +671,9 @@ def test_exported_csv_with_filtering(self):
                       f'jobstatus,submitdate,startdate,enddate,service_units',
                       csv_lines[0].decode('utf-8'))

-        job1_line = self.create_job_csv_line(self.job1)
+        job1_line = self._create_job_csv_line(self.job1)
+        admin_job_line = self._create_job_csv_line(self.admin_job)

         self.assertIn(job1_line, csv_lines[1].decode('utf-8'))
-        self.assertEqual(len(csv_lines), 2)
+        self.assertIn(admin_job_line, csv_lines[2].decode('utf-8'))
+        self.assertEqual(len(csv_lines), 3)

diff --git a/coldfront/core/statistics/utils_/__init__.py b/coldfront/core/statistics/utils_/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/coldfront/core/statistics/utils_/job_accessibility_manager.py b/coldfront/core/statistics/utils_/job_accessibility_manager.py
new file mode 100644
index 000000000..a9415cc68
--- /dev/null
+++ b/coldfront/core/statistics/utils_/job_accessibility_manager.py
@@ -0,0 +1,61 @@
+from django.db.models import Q
+
+from coldfront.core.project.models import ProjectUser
+from coldfront.core.statistics.models import Job
+
+
+class JobAccessibilityManager(object):
+    """A class that defines how read access to job data is delegated to
+    users."""
+
+    def __init__(self, *args, **kwargs):
+        self._valid_project_user_role_names = (
+            'Manager', 'Principal Investigator')
+        self._valid_project_user_status_names = ('Active', 'Pending - Remove')
+
+    def can_user_access_job(self, user, job):
+        """Return whether the given User can access the given Job."""
+        if self._user_has_global_access(user):
+            return True
+
+        is_user_project_pi_or_manager = ProjectUser.objects.filter(
+            project=job.accountid,
+            user=user,
+            role__name__in=self._valid_project_user_role_names,
+            status__name__in=self._valid_project_user_status_names).exists()
+        if is_user_project_pi_or_manager:
+            return True
+
+        if user == job.userid:
+            return True
+
+        return False
+
+    def get_jobs_accessible_to_user(self, user, include_global=False):
+        """Return a queryset of Jobs accessible to the given User.
+        Optionally include all Jobs across all users, which will only be
+        returned if the User has global access."""
+        if self._user_has_global_access(user) and include_global:
+            return Job.objects.all()
+
+        submitted_by_user_q = Q(userid=user)
+
+        managed_project_pks = list(
+            ProjectUser.objects.filter(
+                user=user,
+                role__name__in=self._valid_project_user_role_names,
+                status__name__in=self._valid_project_user_status_names,
+            ).values_list('project', flat=True))
+        submitted_under_managed_project_q = Q(accountid__in=managed_project_pks)
+
+        return Job.objects.filter(
+            submitted_by_user_q | submitted_under_managed_project_q)
+
+    @staticmethod
+    def _user_has_global_access(user):
+        """Return whether the given User has global access to all Jobs."""
+        if user.is_superuser:
+            return True
+        if user.has_perm('statistics.view_job'):
+            return True
+        return False
diff --git a/coldfront/core/statistics/utils_/job_query_filtering.py b/coldfront/core/statistics/utils_/job_query_filtering.py
index 36298cf00..ff6eece85 100644
--- a/coldfront/core/statistics/utils_/job_query_filtering.py
+++ b/coldfront/core/statistics/utils_/job_query_filtering.py
@@ -1,3 +1,7 @@
+import copy
+
+from datetime import datetime
+
 from coldfront.core.utils.common import display_time_zone_date_to_utc_datetime


@@ -70,3 +74,44 @@ def job_query_filtering(job_list, data):
             job_list = job_list.filter(enddate__gt=end_date)

     return job_list
+
+
+class JobSearchFilterSessionStorage(object):
+    """A class that stores job search filters in the user's session for
+    retrieval."""
+
+    def __init__(self, request):
+        self._request = request
+        self._session_key = 'job_search_filters'
+        self._date_keys = ('submitdate', 'startdate', 'enddate')
+        self._date_format = '%m/%d/%Y, %H:%M:%S'
+
+    def get(self):
+        if self._session_key not in self._request.session:
+            return {}
+
+        serialized_filters = self._request.session[self._session_key]
+
+        return self._deserialize_filters(serialized_filters)
+
+    def set(self, filters):
+        self._request.session[self._session_key] = self._serialize_filters(
+            filters)
+
+    def _deserialize_filters(self, filters):
+        filters_copy = copy.deepcopy(filters)
+        for date_key in self._date_keys:
+            serialized_date = filters_copy.get(date_key, None)
+            if serialized_date is not None:
+                filters_copy[date_key] = datetime.strptime(
+                    serialized_date, self._date_format)
+        return filters_copy
+
+    def _serialize_filters(self, filters):
+        filters_copy = copy.deepcopy(filters)
+        for date_key in self._date_keys:
+            unserialized_date = filters_copy.get(date_key, None)
+            if unserialized_date is not None:
+                filters_copy[date_key] = datetime.strftime(
+                    unserialized_date, self._date_format)
+        return filters_copy
diff --git a/coldfront/core/statistics/views.py b/coldfront/core/statistics/views.py
index 948fb0ee7..e451722f6 100644
--- a/coldfront/core/statistics/views.py
+++ b/coldfront/core/statistics/views.py
@@ -1,29 +1,27 @@
-import copy
 import csv
 import itertools
-from datetime import datetime
+import logging

 from django.contrib import messages
 from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
 from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
-from django.db.models import Q
 from django.http import StreamingHttpResponse
+from django.shortcuts import redirect
+from django.urls import reverse
+from django.utils.html import strip_tags
 from django.views import View
 from django.views.generic import DetailView, ListView, TemplateView
-from django.utils.html import strip_tags

-from coldfront.core.project.models import Project, ProjectUser
+from coldfront.core.project.models import ProjectUser
 from coldfront.core.statistics.models import Job
 from coldfront.core.statistics.forms import JobSearchForm
-
-import logging
-
-from coldfront.core.statistics.utils_.job_query_filtering import \
-    job_query_filtering
+from coldfront.core.statistics.utils_.job_accessibility_manager import JobAccessibilityManager
+from coldfront.core.statistics.utils_.job_query_filtering import job_query_filtering
+from coldfront.core.statistics.utils_.job_query_filtering import JobSearchFilterSessionStorage
 from coldfront.core.utils.common import Echo

+
 logger = logging.getLogger(__name__)
-DATE_FORMAT = '%m/%d/%Y, %H:%M:%S'


 class SlurmJobListView(LoginRequiredMixin,
@@ -32,14 +30,6 @@ class SlurmJobListView(LoginRequiredMixin,
     paginate_by = 30
     context_object_name = 'job_list'

-    def save_form_to_session(self, data):
-        new_data = copy.deepcopy(data)
-        for date in ['submitdate', 'startdate', 'enddate']:
-            if data.get(date, None):
-                new_data[date] = data.get(date, None).strftime(DATE_FORMAT)
-        self.request.session['job_search_form_data'] = new_data
-        self.request.session.modified = True
-
     def get_queryset(self):
         order_by = self.request.GET.get('order_by')
         if order_by:
@@ -60,22 +50,19 @@ def get_queryset(self):
                 is_pi=is_pi)

         if job_search_form.is_valid():
-            data = job_search_form.cleaned_data
-            self.save_form_to_session(data)
+            job_filters = job_search_form.cleaned_data

-            if data.get('show_all_jobs') and \
-                    (self.request.user.is_superuser or
-                     self.request.user.has_perm('statistics.view_job')):
-                job_list = Job.objects.all()
-            else:
-                proj_set = Project.objects.filter(
-                    projectuser__user__username=self.request.user,
-                    projectuser__status__name__in=['Active', 'Pending - Remove'],
-                    projectuser__role__name__in=['Principal Investigator', 'Manager'])
-                job_list = Job.objects.filter(Q(accountid__in=proj_set) |
-                                              Q(userid=self.request.user))
+            session_storage = JobSearchFilterSessionStorage(self.request)
+            session_storage.set(job_filters)

-            job_list = job_query_filtering(job_list, data)
+            show_all_jobs = job_filters.get('show_all_jobs', False)
+
+            job_accessibility_manager = JobAccessibilityManager()
+            accessible_jobs = \
+                job_accessibility_manager.get_jobs_accessible_to_user(
+                    self.request.user, include_global=show_all_jobs)
+
+            job_list = job_query_filtering(accessible_jobs, job_filters)
         else:
             job_list = Job.objects.none()

@@ -169,26 +156,17 @@ class SlurmJobDetailView(LoginRequiredMixin,

     def test_func(self):
         """ UserPassesTestMixin Tests"""
-        if self.request.user.is_superuser:
-            return True
-
-        if self.request.user.has_perm('statistics.view_job'):
-            return True
-
         job_obj = self.get_object()

-        if job_obj.accountid.projectuser_set.filter(
-                user=self.request.user,
-                status__name__in=['Active', 'Pending - Remove'],
-                role__name__in=['Principal Investigator', 'Manager']).exists():
-            return True
+        job_accessibility_manager = JobAccessibilityManager()
+        is_job_accessible = job_accessibility_manager.can_user_access_job(
+            self.request.user, job_obj)

-        if job_obj.userid == self.request.user:
-            return True
+        if not is_job_accessible:
+            message = 'You do not have permission to view the previous page.'
+            messages.error(self.request, message)

-        messages.error(
-            self.request, 'You do not have permission to view the previous page.')
-        return False
+        return is_job_accessible

     def get_context_data(self, **kwargs):
         context = super().get_context_data(**kwargs)
@@ -213,48 +191,61 @@ class ExportJobListView(LoginRequiredMixin,
                         UserPassesTestMixin,
                         View):

+    MAX_JOBS_EXPORTABLE = 100000
+
     def test_func(self):
-        """ UserPassesTestMixin Tests"""
-        if self.request.user.is_superuser:
-            return True
+        """Allow access to all users.
+
+        Access to specific jobs is determined by the dispatch method.
+        """
+        return True

     def dispatch(self, request, *args, **kwargs):
-        if self.test_func():
-            data = copy.deepcopy(self.request.session.get('job_search_form_data'))
-
-            job_list = Job.objects.all()
-
-            if data:
-                for date in ['submitdate', 'startdate', 'enddate']:
-                    if data.get(date, None):
-                        data[date] = \
-                            datetime.strptime(data.get(date, None), DATE_FORMAT)
-                job_list = job_query_filtering(job_list, data)
-
-            echo_buffer = Echo()
-            writer = csv.writer(echo_buffer)
-            header = ('jobslurmid',
-                      'username',
-                      'project_name',
-                      'partition',
-                      'jobstatus',
-                      'submitdate',
-                      'startdate',
-                      'enddate',
-                      'service_units')
-            job_list = job_list.values_list('jobslurmid',
-                                            'userid__username',
-                                            'accountid__name',
-                                            'partition',
-                                            'jobstatus',
-                                            'submitdate',
-                                            'startdate',
-                                            'enddate',
-                                            'amount')
-            rows = (writer.writerow(row) for row in itertools.chain([header], job_list.iterator()))
-            response = StreamingHttpResponse(rows, content_type="text/csv")
-            response["Content-Disposition"] = 'attachment; filename="job_list.csv"'
-
-            return response
+        session_storage = JobSearchFilterSessionStorage(request)
+        job_filters = session_storage.get()
+
+        show_all_jobs = job_filters.get('show_all_jobs', False)
+
+        job_accessibility_manager = JobAccessibilityManager()
+        accessible_jobs = job_accessibility_manager.get_jobs_accessible_to_user(
+            self.request.user, include_global=show_all_jobs)
+
+        if job_filters:
+            filtered_jobs = job_query_filtering(accessible_jobs, job_filters)
         else:
-            return super().dispatch(request, *args, **kwargs)
+            filtered_jobs = accessible_jobs
+
+        num_jobs = filtered_jobs.count()
+        if num_jobs > self.MAX_JOBS_EXPORTABLE:
+            message = (
+                f'Your search produced too many results to export. Please '
+                f'limit your search to have fewer than '
+                f'{self.MAX_JOBS_EXPORTABLE} results.')
+            messages.error(request, message)
+            return redirect(reverse('slurm-job-list'))
+
+        return self._get_response(filtered_jobs)
+
+    @staticmethod
+    def _get_response(jobs):
+        """Return a response that streams a CSV containing the requested
+        jobs to an attachment named 'job_list.csv' to be downloaded."""
+        header = (
+            'jobslurmid', 'username', 'project_name', 'partition', 'jobstatus',
+            'submitdate', 'startdate', 'enddate', 'service_units')
+        job_fields = (
+            'jobslurmid', 'userid__username', 'accountid__name', 'partition',
+            'jobstatus', 'submitdate', 'startdate', 'enddate', 'amount')
+
+        job_values_iterator = jobs.values_list(*job_fields).iterator()
+
+        echo_buffer = Echo()
+        writer = csv.writer(echo_buffer)
+        rows = (
+            writer.writerow(row)
+            for row in itertools.chain([header], job_values_iterator))
+
+        response = StreamingHttpResponse(rows, content_type='text/csv')
+        response['Content-Disposition'] = 'attachment; filename="job_list.csv"'
+
+        return response
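For reference, here is a minimal sketch of how the two utilities introduced above are meant to compose when a CSV export is requested. It mirrors SlurmJobListView.get_queryset and ExportJobListView.dispatch in the diff and is an illustration, not additional code from the patch; the helper name jobs_for_export is hypothetical.

    from coldfront.core.statistics.utils_.job_accessibility_manager import \
        JobAccessibilityManager
    from coldfront.core.statistics.utils_.job_query_filtering import (
        JobSearchFilterSessionStorage, job_query_filtering)

    def jobs_for_export(request):
        """Return the queryset the export view would stream for this request."""
        # Filters that the list view stored in the session (dates are kept as
        # strings in the session and deserialized back to datetimes on get()).
        filters = JobSearchFilterSessionStorage(request).get()

        # Jobs this user may read at all; 'show_all_jobs' only widens the set
        # for users with global access (superusers or 'statistics.view_job').
        accessible = JobAccessibilityManager().get_jobs_accessible_to_user(
            request.user, include_global=filters.get('show_all_jobs', False))

        # Apply the saved search filters, if any, on top of the accessible set.
        return job_query_filtering(accessible, filters) if filters else accessible

On top of this, ExportJobListView.dispatch refuses to stream more than MAX_JOBS_EXPORTABLE (100,000) rows and instead redirects back to the list view with an error message.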