From 649972a9495d7f408369969bdaf3cfe4b3646d8c Mon Sep 17 00:00:00 2001 From: Marcos Prieto Date: Tue, 24 Sep 2024 13:08:25 +0200 Subject: [PATCH] Expose individual grade status --- lms/models/grading_sync.py | 11 +++++ lms/services/auto_grading.py | 9 +++- lms/views/dashboard/api/grading.py | 20 +++++++-- .../lms/views/dashboard/api/grading_test.py | 41 +++++++++++++++++-- 4 files changed, 74 insertions(+), 7 deletions(-) diff --git a/lms/models/grading_sync.py b/lms/models/grading_sync.py index a7eccb7110..047773c69b 100644 --- a/lms/models/grading_sync.py +++ b/lms/models/grading_sync.py @@ -73,3 +73,14 @@ class GradingSyncGrade(CreatedUpdatedMixin, Base): success: Mapped[bool | None] = mapped_column() """Whether or not this grade has been synced to the LMS""" + + @property + def status(self) -> AutoGradingSyncStatus: + if self.success is None: + return AutoGradingSyncStatus.IN_PROGRESS + + return ( + AutoGradingSyncStatus.FINISHED + if self.success + else AutoGradingSyncStatus.FAILED + ) diff --git a/lms/services/auto_grading.py b/lms/services/auto_grading.py index 40ae6222d7..1c6395692b 100644 --- a/lms/services/auto_grading.py +++ b/lms/services/auto_grading.py @@ -1,4 +1,5 @@ from sqlalchemy import select +from sqlalchemy.orm import subqueryload from lms.js_config_types import AnnotationMetrics from lms.models import ( @@ -49,7 +50,13 @@ def create_grade_sync( return grading_sync def _search_query(self, assignment, statuses: list[str] | None = None): - query = select(GradingSync).where(GradingSync.assignment_id == assignment.id) + query = ( + select(GradingSync) + .where(GradingSync.assignment_id == assignment.id) + .options( + subqueryload(GradingSync.grades).subqueryload(GradingSyncGrade.lms_user) + ) + ) if statuses: query = query.where(GradingSync.status.in_(statuses)) diff --git a/lms/views/dashboard/api/grading.py b/lms/views/dashboard/api/grading.py index c7f5642b58..f0a1e5330f 100644 --- a/lms/views/dashboard/api/grading.py +++ 
b/lms/views/dashboard/api/grading.py @@ -4,7 +4,7 @@ from pyramid.view import view_config from sqlalchemy import select -from lms.models import LMSUser +from lms.models import GradingSync, LMSUser from lms.security import Permissions from lms.services import AutoGradingService from lms.services.dashboard import DashboardService @@ -75,7 +75,7 @@ def create_grading_sync(self): lms_user_grades, ) self.request.add_finished_callback(self._start_sync_grades) - return {"status": grading_sync.status} + return self._serialize_grading_sync(grading_sync) @view_config( route_name="api.dashboard.assignments.grading.sync", @@ -86,7 +86,7 @@ def create_grading_sync(self): def get_grading_sync(self): assignment = self.dashboard_service.get_request_assignment(self.request) if grading_sync := self.auto_grading_service.get_last_sync(assignment): - return {"status": grading_sync.status} + return self._serialize_grading_sync(grading_sync) self.request.response.status_int = 404 return {"message": f"No existing grading sync for assignment:{assignment.id}"} @@ -97,3 +97,17 @@ def _start_sync_grades(_request) -> None: We use this helper method instead of a lambda to make the test asserts easier. 
""" # noqa: D205 sync_grades.delay() + + @staticmethod + def _serialize_grading_sync(grading_sync: GradingSync) -> dict: + return { + "status": grading_sync.status, + "grades": [ + { + "h_userid": grade.lms_user.h_userid, + "grade": grade.grade, + "status": grade.status, + } + for grade in grading_sync.grades + ], + } diff --git a/tests/unit/lms/views/dashboard/api/grading_test.py b/tests/unit/lms/views/dashboard/api/grading_test.py index a84f75e7b3..0c525334cb 100644 --- a/tests/unit/lms/views/dashboard/api/grading_test.py +++ b/tests/unit/lms/views/dashboard/api/grading_test.py @@ -1,6 +1,7 @@ from unittest.mock import Mock import pytest +from h_matchers import Any from lms.views.dashboard.api.grading import DashboardGradingViews from tests import factories @@ -87,15 +88,22 @@ def test_create_grading_sync( {student_1: 0.5, student_2: 1}, ) assert response == { - "status": auto_grading_service.create_grade_sync.return_value.status + "status": auto_grading_service.create_grade_sync.return_value.status, + "grades": [], } pyramid_request.add_finished_callback.assert_called_once_with( views._start_sync_grades # noqa: SLF001 ) def test_get_grading_sync( - self, auto_grading_service, pyramid_request, views, dashboard_service + self, + auto_grading_service, + pyramid_request, + views, + dashboard_service, + grading_sync, ): + auto_grading_service.get_last_sync.return_value = grading_sync response = views.get_grading_sync() dashboard_service.get_request_assignment.assert_called_once_with( @@ -105,7 +113,14 @@ def test_get_grading_sync( dashboard_service.get_request_assignment.return_value ) assert response == { - "status": auto_grading_service.get_last_sync.return_value.status + "status": grading_sync.status, + "grades": Any.list.containing( + [ + {"grade": 1.0, "h_userid": "STUDENT_1", "status": "in_progress"}, + {"grade": 0.0, "h_userid": "STUDENT_2", "status": "finished"}, + {"grade": 0.5, "h_userid": "STUDENT_3", "status": "failed"}, + ] + ), } def 
test_get_grading_sync_not_found( @@ -136,6 +151,26 @@ def assignment(self, lti_v13_application_instance, db_session): def lms_user(self): return factories.LMSUser(lti_v13_user_id="LTI_V13_USER_ID") + @pytest.fixture + def grading_sync(self, assignment, db_session): + student_1 = factories.LMSUser(h_userid="STUDENT_1") + student_2 = factories.LMSUser(h_userid="STUDENT_2") + student_3 = factories.LMSUser(h_userid="STUDENT_3") + grading_sync = factories.GradingSync(assignment=assignment) + db_session.flush() + + factories.GradingSyncGrade( + lms_user=student_1, grade=1, grading_sync=grading_sync, success=None + ) + factories.GradingSyncGrade( + lms_user=student_2, grade=0, grading_sync=grading_sync, success=True + ) + factories.GradingSyncGrade( + lms_user=student_3, grade=0.5, grading_sync=grading_sync, success=False + ) + + return grading_sync + @pytest.fixture def pyramid_request(self, pyramid_request, lms_user): pyramid_request.parsed_params = {