From 57710d7226351a772fd6c50d909ca6a142d3cebf Mon Sep 17 00:00:00 2001 From: Usman Khalid <2200617@gmail.com> Date: Fri, 20 Dec 2013 19:19:20 +0500 Subject: [PATCH] Added django commands to get statistics and repost submissions to grader for open ended problems. ORA-286 --- .../xmodule/combined_open_ended_module.py | 6 +- .../combined_open_ended_modulev1.py | 26 +++ .../xmodule/tests/test_combined_open_ended.py | 22 +++ .../xmodule/tests/test_util_open_ended.py | 147 ++++++++++++++ .../management/commands/dump_grades.py | 12 +- .../management/commands/openended_post.py | 106 +++++++++++ .../management/commands/openended_stats.py | 138 ++++++++++++++ .../instructor/management/tests/__init__.py | 3 + .../tests/test_openended_commands.py | 180 ++++++++++++++++++ .../instructor/offline_gradecalc.py | 10 +- lms/djangoapps/instructor/utils.py | 37 ++++ 11 files changed, 667 insertions(+), 20 deletions(-) create mode 100644 lms/djangoapps/instructor/management/commands/openended_post.py create mode 100644 lms/djangoapps/instructor/management/commands/openended_stats.py create mode 100644 lms/djangoapps/instructor/management/tests/__init__.py create mode 100644 lms/djangoapps/instructor/management/tests/test_openended_commands.py create mode 100644 lms/djangoapps/instructor/utils.py diff --git a/common/lib/xmodule/xmodule/combined_open_ended_module.py b/common/lib/xmodule/xmodule/combined_open_ended_module.py index 3211bef9d545..a8d4afaa11f0 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_module.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_module.py @@ -4,7 +4,7 @@ from pkg_resources import resource_string from xmodule.raw_module import RawDescriptor -from .x_module import XModule +from .x_module import XModule, module_attr from xblock.fields import Integer, Scope, String, List, Float, Boolean from xmodule.open_ended_grading_classes.combined_open_ended_modulev1 import CombinedOpenEndedV1Module, CombinedOpenEndedV1Descriptor from collections import namedtuple @@ -510,3 +510,7 @@ def non_editable_metadata_fields(self): non_editable_fields.extend([CombinedOpenEndedDescriptor.due, CombinedOpenEndedDescriptor.graceperiod, CombinedOpenEndedDescriptor.markdown, CombinedOpenEndedDescriptor.version, CombinedOpenEndedDescriptor.track_changes]) return non_editable_fields + + # Proxy to CombinedOpenEndedModule so that external callers don't have to know if they're working + # with a module or a descriptor + child_module = module_attr('child_module') diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py index f8fa0ffb4bcf..d23edebf0f3b 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py @@ -363,6 +363,32 @@ def fix_invalid_state(self): last_completed_child = next((i for i, child in reversed(list(enumerate(children))) if child['child_state'] == self.DONE), 0) self.current_task_number = min(last_completed_child + 1, len(best_task_states) - 1) + def create_task(self, task_state, task_xml): + """Create task object for given task state and task xml.""" + + tag_name = self.get_tag_name(task_xml) + children = self.child_modules() + task_descriptor = children['descriptors'][tag_name](self.system) + task_parsed_xml = task_descriptor.definition_from_xml(etree.fromstring(task_xml), self.system) + task = 
children['modules'][tag_name]( + self.system, + self.location, + task_parsed_xml, + task_descriptor, + self.static_data, + instance_state=task_state, + ) + return task + + def get_task_number(self, task_number): + """Return task object at task_index.""" + + task_states_count = len(self.task_states) + if task_states_count > 0 and task_number < task_states_count: + task_state = self.task_states[task_number] + task_xml = self.task_xml[task_number] + return self.create_task(task_state, task_xml) + return None def reset_task_state(self, message=""): """ diff --git a/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py b/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py index 6498c82c4ab3..1d6493bc79ed 100644 --- a/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py +++ b/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py @@ -18,6 +18,7 @@ from xmodule.open_ended_grading_classes.openendedchild import OpenEndedChild from xmodule.open_ended_grading_classes.open_ended_module import OpenEndedModule +from xmodule.open_ended_grading_classes.self_assessment_module import SelfAssessmentModule from xmodule.open_ended_grading_classes.combined_open_ended_modulev1 import CombinedOpenEndedV1Module from xmodule.open_ended_grading_classes.grading_service_module import GradingServiceError from xmodule.combined_open_ended_module import CombinedOpenEndedModule @@ -500,6 +501,27 @@ def test_get_last_response(self): self.assertEqual(response_dict['max_score'], self.max_score) self.assertEqual(response_dict['state'], CombinedOpenEndedV1Module.INITIAL) + def test_create_task(self): + combinedoe = self.generate_oe_module(TEST_STATE_AI, 1, [self.task_xml1, self.task_xml2]) + + first_task = combinedoe.create_task(combinedoe.task_states[0], combinedoe.task_xml[0]) + self.assertIsInstance(first_task, SelfAssessmentModule) + + second_task = combinedoe.create_task(combinedoe.task_states[1], combinedoe.task_xml[1]) + self.assertIsInstance(second_task, OpenEndedModule) + + def test_get_task_number(self): + combinedoe = self.generate_oe_module(TEST_STATE_AI, 1, [self.task_xml1, self.task_xml2]) + + first_task = combinedoe.get_task_number(0) + self.assertIsInstance(first_task, SelfAssessmentModule) + + second_task = combinedoe.get_task_number(1) + self.assertIsInstance(second_task, OpenEndedModule) + + third_task = combinedoe.get_task_number(2) + self.assertIsNone(third_task) + def test_update_task_states(self): """ See if we can update the task states properly diff --git a/common/lib/xmodule/xmodule/tests/test_util_open_ended.py b/common/lib/xmodule/xmodule/tests/test_util_open_ended.py index dac7a2b697e4..3f6b07fa0f9a 100644 --- a/common/lib/xmodule/xmodule/tests/test_util_open_ended.py +++ b/common/lib/xmodule/xmodule/tests/test_util_open_ended.py @@ -774,6 +774,153 @@ def serialize_open_ended_instance_state(json_str): } """) + +# State Initial + +STATE_INITIAL = serialize_open_ended_instance_state(""" +{ + "ready_to_reset": false, + "skip_spelling_checks": false, + "current_task_number": 0, + "old_task_states": [], + "weight": 1, + "task_states": [ + { + "child_attempts" : 1, + "child_created" : false, + "child_history" : [], + "child_state" : "done", + "max_score" : 3, + "version" : 1 + }, + { + "child_created": false, + "child_attempts": 0, + "stored_answer": "A stored answer.", + "version": 1, + "child_history": [], + "max_score": 3, + "child_state": "initial" + } + ], + "graded": true, + "student_attempts": 0, + "required_peer_grading": 3, + "state": "initial", + 
"accept_file_upload": false, + "min_to_calibrate": 3, + "max_to_calibrate": 6, + "display_name": "Open Response Assessment", + "peer_grader_count": 3, + "max_attempts": 1 +}""") + +STATE_ACCESSING = serialize_open_ended_instance_state(""" +{ + "ready_to_reset": false, + "skip_spelling_checks": false, + "current_task_number": 0, + "old_task_states": [], + "weight": 1, + "task_states": [ + { + "child_attempts" : 1, + "child_created" : false, + "child_history": [ + { + "answer": "Here is an answer." + } + ], + "child_state" : "done", + "max_score" : 3, + "version" : 1 + }, + { + "child_created": false, + "child_attempts": 0, + "stored_answer": null, + "version": 1, + "child_history": [ + { + "answer": "Here is an answer." + } + ], + "max_score": 3, + "child_state": "assessing" + } + ], + "graded": true, + "student_attempts": 0, + "required_peer_grading": 3, + "state": "assessing", + "accept_file_upload": false, + "min_to_calibrate": 3, + "max_to_calibrate": 6, + "display_name": "Open Response Assessment", + "peer_grader_count": 3, + "max_attempts": 1 +}""") + +STATE_POST_ASSESSMENT = serialize_open_ended_instance_state(""" +{ + "ready_to_reset": false, + "skip_spelling_checks": false, + "current_task_number": 0, + "old_task_states": [], + "weight": 1, + "task_states": [ + { + "child_attempts" : 1, + "child_created" : false, + "child_history": [ + { + "answer": "Here is an answer." + } + ], + "child_state" : "done", + "max_score" : 3, + "version" : 1 + }, + { + "child_created": false, + "child_attempts": 0, + "stored_answer": null, + "version": 1, + "child_history": [ + { + "answer": "Here is an answer." + } + ], + "max_score": 3, + "post_assessment": { + "feedback" : { + "grammar" : "Grammar: Ok.", + "markup-text" : "valid essay", + "spelling" : "Spelling: Ok." + }, + "grader_id" : 3237, + "grader_type" : "ML", + "rubric_scores_complete" : true, + "rubric_xml" : "Response Quality3", + "score" : 2, + "submission_id" : 3099, + "success" : true + }, + "child_state": "post_assessment" + } + ], + "graded": true, + "student_attempts": 0, + "required_peer_grading": 3, + "state": "done", + "accept_file_upload": false, + "min_to_calibrate": 3, + "max_to_calibrate": 6, + "display_name": "Open Response Assessment", + "peer_grader_count": 3, + "max_attempts": 1 +}""") + # Task state with self assessment only. TEST_STATE_SA = ["{\"child_created\": false, \"child_attempts\": 1, \"version\": 1, \"child_history\": [{\"answer\": \"Censorship in the Libraries\\r
'All of us can think of a book that we hope none of our children or any other children have taken off the shelf. But if I have the right to remove that book from the shelf -- that work I abhor -- then you also have exactly the same right and so does everyone else. And then we have no books left on the shelf for any of us.' --Katherine Paterson, Author\\r\\n\\r\\nWrite a persuasive essay to a newspaper reflecting your views on censorship in libraries. Do you believe that certain materials, such as books, music, movies, magazines, etc., should be removed from the shelves if they are found offensive? Support your position with convincing arguments from your own experience, observations, and/or reading.\", \"post_assessment\": \"[3, 3, 2, 2, 2]\", \"score\": 12}], \"max_score\": 12, \"child_state\": \"done\"}", "{\"child_created\": false, \"child_attempts\": 0, \"version\": 1, \"child_history\": [{\"answer\": \"Censorship in the Libraries\\r\\n'All of us can think of a book that we hope none of our children or any other children have taken off the shelf. But if I have the right to remove that book from the shelf -- that work I abhor -- then you also have exactly the same right and so does everyone else. And then we have no books left on the shelf for any of us.' --Katherine Paterson, Author\\r\\n\\r\\n
Write a persuasive essay to a newspaper reflecting your views on censorship in libraries. Do you believe that certain materials, such as books, music, movies, magazines, etc., should be removed from the shelves if they are found offensive? Support your position with convincing arguments from your own experience, observations, and/or reading.\", \"post_assessment\": \"{\\\"submission_id\\\": 1461, \\\"score\\\": 12, \\\"feedback\\\": \\\"{\\\\\\\"feedback\\\\\\\": \\\\\\\"\\\\\\\"}\\\", \\\"success\\\": true, \\\"grader_id\\\": 5414, \\\"grader_type\\\": \\\"IN\\\", \\\"rubric_scores_complete\\\": true, \\\"rubric_xml\\\": \\\"\\\\nIdeas\\\\n3\\\\nContent\\\\n3\\\\nOrganization\\\\n2\\\\nStyle\\\\n2\\\\nVoice\\\\n2\\\"}\", \"score\": 12}], \"max_score\": 12, \"child_state\": \"post_assessment\"}"] diff --git a/lms/djangoapps/instructor/management/commands/dump_grades.py b/lms/djangoapps/instructor/management/commands/dump_grades.py index ff4edd02e0c3..9da068cedf09 100644 --- a/lms/djangoapps/instructor/management/commands/dump_grades.py +++ b/lms/djangoapps/instructor/management/commands/dump_grades.py @@ -10,6 +10,7 @@ from xmodule.modulestore.django import modulestore from django.core.management.base import BaseCommand +from instructor.utils import DummyRequest class Command(BaseCommand): @@ -37,7 +38,7 @@ def handle(self, *args, **options): if len(args) > 2: get_raw_scores = args[2].lower() == 'raw' - request = self.DummyRequest() + request = DummyRequest() try: course = get_course_by_id(course_id) except Exception: @@ -63,12 +64,3 @@ def handle(self, *args, **options): fp.close() print "Done: %d records dumped" % len(datatable['data']) - - class DummyRequest(object): - META = {} - def __init__(self): - return - def get_host(self): - return 'edx.mit.edu' - def is_secure(self): - return False diff --git a/lms/djangoapps/instructor/management/commands/openended_post.py b/lms/djangoapps/instructor/management/commands/openended_post.py new file mode 100644 index 000000000000..b1f22c7bc2d6 --- /dev/null +++ b/lms/djangoapps/instructor/management/commands/openended_post.py @@ -0,0 +1,106 @@ +""" +Command to manually re-post open ended submissions to the grader. +""" +from django.contrib.auth.models import User +from django.core.management.base import BaseCommand +from optparse import make_option + +from xmodule.modulestore.django import modulestore +from xmodule.open_ended_grading_classes.openendedchild import OpenEndedChild +from xmodule.open_ended_grading_classes.open_ended_module import OpenEndedModule + +from courseware.courses import get_course + +from instructor.utils import get_module_for_student + + +class Command(BaseCommand): + """ + Command to manually re-post open ended submissions to the grader. + """ + + help = ("Usage: openended_post --dry-run --task-number=\n" + "The text file should contain a User.id in each line.") + + option_list = BaseCommand.option_list + ( + make_option('-n', '--dry-run', + action='store_true', dest='dry_run', default=False, + help="Do everything except send the submission to the grader. 
"), + make_option('--task-number', + type='int', default=0, + help="Task number that needs to be submitted."), + ) + + def handle(self, *args, **options): + + dry_run = options['dry_run'] + task_number = options['task_number'] + + if len(args) == 3: + course_id = args[0] + location = args[1] + students_ids = [line.strip() for line in open(args[2])] + else: + print self.help + return + + try: + course = get_course(course_id) + except ValueError as err: + print err + return + + descriptor = modulestore().get_instance(course.id, location, depth=0) + if descriptor is None: + print "Location not found in course" + return + + if dry_run: + print "Doing a dry run." + + students = User.objects.filter(id__in=students_ids).order_by('username') + print "Number of students: {0}".format(students.count()) + + for student in students: + post_submission_for_student(student, course, location, task_number, dry_run=dry_run) + + +def post_submission_for_student(student, course, location, task_number, dry_run=True): + """If the student's task child_state is ASSESSING post submission to grader.""" + + print "{0}:{1}".format(student.id, student.username) + try: + module = get_module_for_student(student, course, location) + if module is None: + print " WARNING: No state found." + return False + + latest_task = module.child_module.get_task_number(task_number) + if latest_task is None: + print " WARNING: No task state found." + return False + + if not isinstance(latest_task, OpenEndedModule): + print " ERROR: Not an OpenEndedModule task." + return False + + latest_task_state = latest_task.child_state + + if latest_task_state == OpenEndedChild.INITIAL: + print " WARNING: No submission." + elif latest_task_state == OpenEndedChild.POST_ASSESSMENT or latest_task_state == OpenEndedChild.DONE: + print " WARNING: Submission already graded." + elif latest_task_state == OpenEndedChild.ASSESSING: + latest_answer = latest_task.latest_answer() + if dry_run: + print " Skipped sending submission to grader: {0!r}".format(latest_answer[:100].encode('utf-8')) + else: + latest_task.send_to_grader(latest_answer, latest_task.system) + print " Sent submission to grader: {0!r}".format(latest_answer[:100].encode('utf-8')) + return True + else: + print "WARNING: Invalid task_state: {0}".format(latest_task_state) + except Exception as err: # pylint: disable=broad-except + print err + + return False diff --git a/lms/djangoapps/instructor/management/commands/openended_stats.py b/lms/djangoapps/instructor/management/commands/openended_stats.py new file mode 100644 index 000000000000..5fd619b484a9 --- /dev/null +++ b/lms/djangoapps/instructor/management/commands/openended_stats.py @@ -0,0 +1,138 @@ +""" +Command to get statistics about open ended problems. +""" +import csv +import time + +from django.core.management.base import BaseCommand +from optparse import make_option + +from xmodule.modulestore import Location +from xmodule.modulestore.django import modulestore +from xmodule.open_ended_grading_classes.openendedchild import OpenEndedChild + +from courseware.courses import get_course +from courseware.models import StudentModule +from student.models import anonymous_id_for_user, CourseEnrollment + +from instructor.utils import get_module_for_student + + +class Command(BaseCommand): + """ + Command to get statistics about open ended problems. 
+ """ + + help = "Usage: openended_stats --task-number=\n" + + option_list = BaseCommand.option_list + ( + make_option('--task-number', + type='int', default=0, + help="Task number to get statistics about."), + ) + + def handle(self, *args, **options): + """Handler for command.""" + + task_number = options['task_number'] + + if len(args) == 2: + course_id = args[0] + location = args[1] + else: + print self.help + return + + try: + course = get_course(course_id) + except ValueError as err: + print err + return + + descriptor = modulestore().get_instance(course.id, location, depth=0) + if descriptor is None: + print "Location {0} not found in course".format(location) + return + + try: + enrolled_students = CourseEnrollment.users_enrolled_in(course_id) + print "Total students enrolled in {0}: {1}".format(course_id, enrolled_students.count()) + + calculate_task_statistics(enrolled_students, course, location, task_number) + + except KeyboardInterrupt: + print "\nOperation Cancelled" + + +def calculate_task_statistics(students, course, location, task_number, write_to_file=True): + """Print stats of students.""" + + stats = { + OpenEndedChild.INITIAL: 0, + OpenEndedChild.ASSESSING: 0, + OpenEndedChild.POST_ASSESSMENT: 0, + OpenEndedChild.DONE: 0 + } + + students_with_saved_answers = [] + students_with_ungraded_submissions = [] # pylint: disable=invalid-name + students_with_graded_submissions = [] # pylint: disable=invalid-name + students_with_no_state = [] + + student_modules = StudentModule.objects.filter(module_state_key=location, student__in=students).order_by('student') + print "Total student modules: {0}".format(student_modules.count()) + + for index, student_module in enumerate(student_modules): + if index % 100 == 0: + print "--- {0} students processed ---".format(index) + + student = student_module.student + print "{0}:{1}".format(student.id, student.username) + + module = get_module_for_student(student, course, location) + if module is None: + print " WARNING: No state found" + students_with_no_state.append(student) + continue + + latest_task = module.child_module.get_task_number(task_number) + if latest_task is None: + print " No task state found" + students_with_no_state.append(student) + continue + + task_state = latest_task.child_state + stats[task_state] += 1 + print " State: {0}".format(task_state) + + if task_state == OpenEndedChild.INITIAL: + if latest_task.stored_answer is not None: + students_with_saved_answers.append(student) + elif task_state == OpenEndedChild.ASSESSING: + students_with_ungraded_submissions.append(student) + elif task_state == OpenEndedChild.POST_ASSESSMENT or task_state == OpenEndedChild.DONE: + students_with_graded_submissions.append(student) + + location = Location(location) + + print "----------------------------------" + print "Time: {0}".format(time.strftime("%Y %b %d %H:%M:%S +0000", time.gmtime())) + print "Course: {0}".format(course.id) + print "Location: {0}".format(location) + print "No state: {0}".format(len(students_with_no_state)) + print "Initial State: {0}".format(stats[OpenEndedChild.INITIAL] - len(students_with_saved_answers)) + print "Saved answers: {0}".format(len(students_with_saved_answers)) + print "Submitted answers: {0}".format(stats[OpenEndedChild.ASSESSING]) + print "Received grades: {0}".format(stats[OpenEndedChild.POST_ASSESSMENT] + stats[OpenEndedChild.DONE]) + print "----------------------------------" + + if write_to_file: + filename = "stats.{0}.{1}".format(location.course, location.name) + time_stamp = 
time.strftime("%Y%m%d-%H%M%S") + with open('{0}.{1}.csv'.format(filename, time_stamp), 'wb') as csv_file: + writer = csv.writer(csv_file, delimiter=' ', quoting=csv.QUOTE_MINIMAL) + for student in students_with_ungraded_submissions: + writer.writerow(("ungraded", student.id, anonymous_id_for_user(student, ''), student.username)) + for student in students_with_graded_submissions: + writer.writerow(("graded", student.id, anonymous_id_for_user(student, ''), student.username)) + return stats diff --git a/lms/djangoapps/instructor/management/tests/__init__.py b/lms/djangoapps/instructor/management/tests/__init__.py new file mode 100644 index 000000000000..8ab2057223da --- /dev/null +++ b/lms/djangoapps/instructor/management/tests/__init__.py @@ -0,0 +1,3 @@ +""" +Tests for the instructor app management commands. +""" diff --git a/lms/djangoapps/instructor/management/tests/test_openended_commands.py b/lms/djangoapps/instructor/management/tests/test_openended_commands.py new file mode 100644 index 000000000000..14a4383f909f --- /dev/null +++ b/lms/djangoapps/instructor/management/tests/test_openended_commands.py @@ -0,0 +1,180 @@ +"""Test the openended_post management command.""" + +from datetime import datetime +import json +from mock import patch, ANY +from pytz import UTC + +from django.test.utils import override_settings + +import capa.xqueue_interface as xqueue_interface +from xmodule.modulestore import Location +from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase +from xmodule.open_ended_grading_classes.openendedchild import OpenEndedChild +from xmodule.tests.test_util_open_ended import ( + STATE_INITIAL, STATE_ACCESSING, STATE_POST_ASSESSMENT +) + +from courseware.courses import get_course_with_access +from courseware.tests.factories import StudentModuleFactory, UserFactory +from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE +from student.models import anonymous_id_for_user + +from instructor.management.commands.openended_post import post_submission_for_student +from instructor.management.commands.openended_stats import calculate_task_statistics +from instructor.utils import get_module_for_student + + +@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) +class OpenEndedPostTest(ModuleStoreTestCase): + """Test the openended_post management command.""" + + def setUp(self): + self.course_id = "edX/open_ended/2012_Fall" + self.problem_location = Location(["i4x", "edX", "open_ended", "combinedopenended", "SampleQuestion"]) + self.self_assessment_task_number = 0 + self.open_ended_task_number = 1 + + self.student_on_initial = UserFactory() + self.student_on_accessing = UserFactory() + self.student_on_post_assessment = UserFactory() + + StudentModuleFactory.create( + course_id=self.course_id, + module_state_key=self.problem_location, + student=self.student_on_initial, + grade=0, + max_grade=1, + state=STATE_INITIAL + ) + + StudentModuleFactory.create( + course_id=self.course_id, + module_state_key=self.problem_location, + student=self.student_on_accessing, + grade=0, + max_grade=1, + state=STATE_ACCESSING + ) + + StudentModuleFactory.create( + course_id=self.course_id, + module_state_key=self.problem_location, + student=self.student_on_post_assessment, + grade=0, + max_grade=1, + state=STATE_POST_ASSESSMENT + ) + + def test_post_submission_for_student_on_initial(self): + course = get_course_with_access(self.student_on_initial, self.course_id, 'load') + + dry_run_result = post_submission_for_student(self.student_on_initial, course, 
self.problem_location, self.open_ended_task_number, dry_run=True) + self.assertFalse(dry_run_result) + + result = post_submission_for_student(self.student_on_initial, course, self.problem_location, self.open_ended_task_number, dry_run=False) + self.assertFalse(result) + + def test_post_submission_for_student_on_accessing(self): + course = get_course_with_access(self.student_on_accessing, self.course_id, 'load') + + dry_run_result = post_submission_for_student(self.student_on_accessing, course, self.problem_location, self.open_ended_task_number, dry_run=True) + self.assertFalse(dry_run_result) + + with patch('capa.xqueue_interface.XQueueInterface.send_to_queue') as mock_send_to_queue: + mock_send_to_queue.return_value = (0, "Successfully queued") + + module = get_module_for_student(self.student_on_accessing, course, self.problem_location) + task = module.child_module.get_task_number(self.open_ended_task_number) + + qtime = datetime.strftime(datetime.now(UTC), xqueue_interface.dateformat) + student_info = {'anonymous_student_id': anonymous_id_for_user(self.student_on_accessing, ''), + 'submission_time': qtime} + + contents = task.payload.copy() + contents.update({ + 'max_score': 2, + 'student_info': json.dumps(student_info), + 'student_response': "Here is an answer.", + }) + + result = post_submission_for_student(self.student_on_accessing, course, self.problem_location, self.open_ended_task_number, dry_run=False) + self.assertTrue(result) + mock_send_to_queue.assert_called_with(body=json.dumps(contents), header=ANY) + + def test_post_submission_for_student_on_post_assessment(self): + course = get_course_with_access(self.student_on_post_assessment, self.course_id, 'load') + + dry_run_result = post_submission_for_student(self.student_on_post_assessment, course, self.problem_location, self.open_ended_task_number, dry_run=True) + self.assertFalse(dry_run_result) + + result = post_submission_for_student(self.student_on_post_assessment, course, self.problem_location, self.open_ended_task_number, dry_run=False) + self.assertFalse(result) + + def test_post_submission_for_student_invalid_task(self): + course = get_course_with_access(self.student_on_accessing, self.course_id, 'load') + + result = post_submission_for_student(self.student_on_accessing, course, self.problem_location, self.self_assessment_task_number, dry_run=False) + self.assertFalse(result) + + out_of_bounds_task_number = 3 + result = post_submission_for_student(self.student_on_accessing, course, self.problem_location, out_of_bounds_task_number, dry_run=False) + self.assertFalse(result) + + +@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) +class OpenEndedStatsTest(ModuleStoreTestCase): + """Test the openended_stats management command.""" + + def setUp(self): + self.course_id = "edX/open_ended/2012_Fall" + self.problem_location = Location(["i4x", "edX", "open_ended", "combinedopenended", "SampleQuestion"]) + self.task_number = 1 + self.invalid_task_number = 3 + + self.student_on_initial = UserFactory() + self.student_on_accessing = UserFactory() + self.student_on_post_assessment = UserFactory() + + StudentModuleFactory.create( + course_id=self.course_id, + module_state_key=self.problem_location, + student=self.student_on_initial, + grade=0, + max_grade=1, + state=STATE_INITIAL + ) + + StudentModuleFactory.create( + course_id=self.course_id, + module_state_key=self.problem_location, + student=self.student_on_accessing, + grade=0, + max_grade=1, + state=STATE_ACCESSING + ) + + StudentModuleFactory.create( + 
course_id=self.course_id, + module_state_key=self.problem_location, + student=self.student_on_post_assessment, + grade=0, + max_grade=1, + state=STATE_POST_ASSESSMENT + ) + + self.students = [self.student_on_initial, self.student_on_accessing, self.student_on_post_assessment] + + def test_calculate_task_statistics(self): + course = get_course_with_access(self.student_on_accessing, self.course_id, 'load') + stats = calculate_task_statistics(self.students, course, self.problem_location, self.task_number, write_to_file=False) + self.assertEqual(stats[OpenEndedChild.INITIAL], 1) + self.assertEqual(stats[OpenEndedChild.ASSESSING], 1) + self.assertEqual(stats[OpenEndedChild.POST_ASSESSMENT], 1) + self.assertEqual(stats[OpenEndedChild.DONE], 0) + + stats = calculate_task_statistics(self.students, course, self.problem_location, self.invalid_task_number, write_to_file=False) + self.assertEqual(stats[OpenEndedChild.INITIAL], 0) + self.assertEqual(stats[OpenEndedChild.ASSESSING], 0) + self.assertEqual(stats[OpenEndedChild.POST_ASSESSMENT], 0) + self.assertEqual(stats[OpenEndedChild.DONE], 0) diff --git a/lms/djangoapps/instructor/offline_gradecalc.py b/lms/djangoapps/instructor/offline_gradecalc.py index da5fb3f7d46c..5b9589900577 100644 --- a/lms/djangoapps/instructor/offline_gradecalc.py +++ b/lms/djangoapps/instructor/offline_gradecalc.py @@ -13,6 +13,7 @@ from courseware.courses import get_course_by_id from django.contrib.auth.models import User +from instructor.utils import DummyRequest class MyEncoder(JSONEncoder): @@ -38,15 +39,6 @@ def offline_grade_calculation(course_id): enc = MyEncoder() - class DummyRequest(object): - META = {} - def __init__(self): - return - def get_host(self): - return 'edx.mit.edu' - def is_secure(self): - return False - print "%d enrolled students" % len(enrolled_students) course = get_course_by_id(course_id) diff --git a/lms/djangoapps/instructor/utils.py b/lms/djangoapps/instructor/utils.py new file mode 100644 index 000000000000..72eea1895132 --- /dev/null +++ b/lms/djangoapps/instructor/utils.py @@ -0,0 +1,37 @@ +""" +Helpers for instructor app. +""" + +from xmodule.modulestore.django import modulestore + +from courseware.model_data import FieldDataCache +from courseware.module_render import get_module + + +class DummyRequest(object): + """Dummy request""" + + META = {} + + def __init__(self): + self.session = {} + self.user = None + return + + def get_host(self): + """Return a default host.""" + return 'edx.mit.edu' + + def is_secure(self): + """Always insecure.""" + return False + + +def get_module_for_student(student, course, location): + """Return the module for the (student, location) using a DummyRequest.""" + request = DummyRequest() + request.user = student + + descriptor = modulestore().get_instance(course.id, location, depth=0) + field_data_cache = FieldDataCache([descriptor], course.id, student) + return get_module(student, request, location, field_data_cache, course.id)
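
A minimal usage sketch for the two new commands, driven through Django's standard call_command API (e.g. from a shell or a smoke test). The course id, problem location, and file name below are illustrative assumptions, not values taken from this patch:

    from django.core.management import call_command

    # Dry-run re-post of ungraded submissions for the users listed in
    # student_ids.txt (one User.id per line); nothing is sent to the grader.
    call_command(
        'openended_post',
        'MITx/6.002x/2012_Fall',                          # example course_id
        'i4x://MITx/6.002x/combinedopenended/Problem1',   # example location
        'student_ids.txt',
        task_number=1,
        dry_run=True,
    )

    # Print per-state counts for the same problem and write the
    # graded/ungraded student CSV.
    call_command(
        'openended_stats',
        'MITx/6.002x/2012_Fall',
        'i4x://MITx/6.002x/combinedopenended/Problem1',
        task_number=1,
    )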
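
openended_stats writes a space-delimited CSV of graded and ungraded students. A small sketch of reading it back, assuming only what the writer above produces (the file name is illustrative):

    import csv

    # Each row is written as: status, user id, anonymous id, username
    with open('stats.open_ended.SampleQuestion.20131220-191920.csv', 'rb') as csv_file:
        for status, user_id, anonymous_id, username in csv.reader(csv_file, delimiter=' '):
            print status, user_id, username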