Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,6 @@ LanguageTool-5.4
package-lock.json
learning_observer/learning_observer/static_data/google/
learning_observer/learning_observer/static_data/admins.yaml
.ipynb_checkpoints/
.ipynb_checkpoints/
.eggs/
.next/
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0.1.0+2025.04.01T14.51.04.751Z.f8ffbdbc.berickson.course.list.homepage.improvements
0.1.0+2025.04.01T15.05.13.407Z.44725993.berickson.022025.gpt.dashboard.updates
2 changes: 1 addition & 1 deletion learning_observer/VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0.1.0+2025.04.01T14.51.04.751Z.f8ffbdbc.berickson.course.list.homepage.improvements
0.1.0+2025.03.27T20.51.04.053Z.57041e9f.berickson.022025.gpt.dashboard.updates
6 changes: 3 additions & 3 deletions learning_observer/learning_observer/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@
cache_backend = None


def create_key_from_args(*args, **kwargs):
key_dict = {'args': args, 'kwargs': kwargs}
def create_key_from_args(func, *args, **kwargs):
    '''
    Build a deterministic cache-key string for a function call.

    The function's string representation is folded into the key so that
    two different functions invoked with identical arguments produce
    distinct keys. `sort_keys=True` guarantees that equivalent calls
    always serialize to the same string regardless of dict ordering.
    '''
    call_signature = {'func': str(func), 'args': args, 'kwargs': kwargs}
    return json.dumps(call_signature, sort_keys=True)

Expand Down Expand Up @@ -37,7 +37,7 @@ async def wrapper(*args, **kwargs):
return await func(*args, **kwargs)

# process item if the cache is present
key = create_key_from_args(args, kwargs)
key = create_key_from_args(func, args, kwargs)
if key in await cache_backend.keys():
return await cache_backend[key]
result = await func(*args, **kwargs)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,9 @@ async def call_dispatch(functions, function_name, args, kwargs):
...
learning_observer.communication_protocol.exception.DAGExecutionException: ('Function double did not execute properly during call.', 'call_dispatch', {'function_name': 'double', 'args': [None], 'kwargs': {}, 'error': 'Input cannot be None'}, ...)
"""
# TODO add in provenance to the call
# this probably requires switching to an async generator instead of regular return
provenance = {'function_name': function_name, 'args': args, 'kwargs': kwargs}
try:
function = functions[function_name]
result = function(*args, **kwargs)
Expand Down Expand Up @@ -528,6 +531,7 @@ async def hack_handle_keys(function, STUDENTS=None, STUDENTS_path=None, RESOURCE
We create a list of fields needed for the `make_key()` function as well as the provenance
associated with each. These are zipped together and returned to the user.
"""
# TODO do something if `func` is not found
func = next((item for item in learning_observer.module_loader.reducers() if item['id'] == function), None)
fields_and_provenances = None
if STUDENTS is not None and RESOURCES is None:
Expand Down Expand Up @@ -703,7 +707,6 @@ async def visit(node_name):
# We've already done this one.
if node_name in visited:
return nodes[node_name]

# Execute all the child nodes
await walk_dict(nodes[node_name])

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
This file provides utility functions specific to the
communication protocol.
'''
import inspect

import learning_observer.communication_protocol.query as q
import learning_observer.communication_protocol.exception
Expand Down
10 changes: 8 additions & 2 deletions learning_observer/learning_observer/dashboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -560,10 +560,15 @@ def _find_student_or_resource(d):
provenance = d['provenance']
output = []
if 'STUDENT' in provenance:
output.append('students')
output.append(provenance['STUDENT']['user_id'])
if 'RESOURCE' in provenance:
output.append('documents')
output.append(provenance['RESOURCE']['doc_id'])
if 'doc_id' in provenance['RESOURCE']:
output.append('documents')
output.append(provenance['RESOURCE']['doc_id'])
if 'assignment_id' in provenance['RESOURCE']:
output.append('assignments')
output.append(provenance['RESOURCE']['assignment_id'])
if output:
return output
return _find_student_or_resource(provenance)
Expand Down Expand Up @@ -629,6 +634,7 @@ async def _execute_dag(dag_query, target, params):
await _drive_generator(generator, dag_query['kwargs'])

# Handle rescheduling the execution of the DAG for fresh data
# TODO add some way to specify specific endpoint delays
dag_delay = dag_query['kwargs'].get('rerun_dag_delay', 10)
if dag_delay < 0:
# if dag_delay is negative, we skip repeated execution
Expand Down
13 changes: 13 additions & 0 deletions learning_observer/learning_observer/google.py
Original file line number Diff line number Diff line change
Expand Up @@ -381,6 +381,19 @@ def clean_course_list(google_json):
return courses


@register_cleaner('course_work', 'assignments')
def clean_course_work(google_json):
    '''
    Flatten and sort Google Classroom course work.

    Google's course work response is one object deeper than we'd like
    (the list lives under the `courseWork` key), and update-time sort
    order is nicer. Returns the list of assignment dicts, sorted
    in place oldest-first by their update time.
    '''
    assignments = google_json.get('courseWork', [])
    # Google Classroom returns camelCase keys (`updateTime`, an RFC3339
    # string), so the previous snake_case `update_time` lookup always hit
    # the default and the sort was a no-op. Accept either spelling; the
    # default is '' so missing values sort first and stay comparable
    # with the string timestamps.
    assignments.sort(
        key=lambda x: x.get('updateTime', x.get('update_time', ''))
    )
    return assignments


# Google Docs
def _force_text_length(text, length):
'''
Expand Down
4 changes: 2 additions & 2 deletions learning_observer/learning_observer/rosters.py
Original file line number Diff line number Diff line change
Expand Up @@ -446,9 +446,9 @@ async def memoize_courseroster_runtime(runtime, course_id):
individual nodes are handled: static, dynamic (current), or memoized.
'''
@learning_observer.cache.async_memoization()
async def memoization_layer(c):
async def course_roster_memoization_layer(c):
return await courseroster_runtime(runtime, c)
return await memoization_layer(course_id)
return await course_roster_memoization_layer(course_id)


async def courseroster(request, course_id):
Expand Down
2 changes: 2 additions & 0 deletions learning_observer/learning_observer/routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -240,6 +240,8 @@ def register_auth_webapp_views(app):
debug_log("Running with Google authentication")
app.add_routes([
aiohttp.web.get(
# TODO only allow the available sign-in options found in pmss
# '/auth/login/{provider:google|canvas|schoology}',
'/auth/login/{provider:google}',
handler=learning_observer.auth.social_handler),
])
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,167 @@
'''
This file creates an All-In-One component for the Learning
Observer server connection. This handles updating data from the
server (based on individual tree updates), storing any errors
that occurred, and showing the time since it was last updated.
'''
from dash import html, dcc, clientside_callback, Output, Input, State, MATCH
import dash_bootstrap_components as dbc
import datetime
import uuid

class LODocumentSourceSelectorAIO(dbc.Card):
    '''
    All-in-One component for choosing a document source on a dashboard.

    Renders a Bootstrap card with a radio selector for the source type
    ('latest', 'assignment', or 'timestamp'), inputs for the extra
    argument each source needs (shown/hidden to match the selection),
    and an Apply button. When Apply is clicked, the chosen source and
    its arguments are written to the `kwargs_store` dcc.Store as
    ``{'src': ..., 'kwargs': {...}}`` for other callbacks to consume.
    '''

    # Dash All-in-One pattern: dictionary ids parameterized by `aio_id`
    # so multiple instances can coexist and be wired independently via
    # the MATCH pattern-matching callbacks registered below.
    class ids:
        source_selector = lambda aio_id: {
            'component': 'LODocumentSourceSelectorAIO',
            'subcomponent': 'source_selector',
            'aio_id': aio_id
        }
        assignment_wrapper = lambda aio_id: {
            'component': 'LODocumentSourceSelectorAIO',
            'subcomponent': 'assignment_wrapper',
            'aio_id': aio_id
        }
        assignment_input = lambda aio_id: {
            'component': 'LODocumentSourceSelectorAIO',
            'subcomponent': 'assignment_input',
            'aio_id': aio_id
        }
        datetime_wrapper = lambda aio_id: {
            'component': 'LODocumentSourceSelectorAIO',
            'subcomponent': 'datetime_wrapper',
            'aio_id': aio_id
        }
        date_input = lambda aio_id: {
            'component': 'LODocumentSourceSelectorAIO',
            'subcomponent': 'date_input',
            'aio_id': aio_id
        }
        timestamp_input = lambda aio_id: {
            'component': 'LODocumentSourceSelectorAIO',
            'subcomponent': 'timestamp_input',
            'aio_id': aio_id
        }
        kwargs_store = lambda aio_id: {
            'component': 'LODocumentSourceSelectorAIO',
            'subcomponent': 'kwargs_store',
            'aio_id': aio_id
        }
        apply = lambda aio_id: {
            'component': 'LODocumentSourceSelectorAIO',
            'subcomponent': 'apply',
            'aio_id': aio_id
        }

    # Expose the ids class as a public attribute (standard AIO convention),
    # so pages can target subcomponents, e.g. `LODocumentSourceSelectorAIO.ids.kwargs_store(...)`.
    ids = ids

    def __init__(self, aio_id=None):
        '''
        Build the card.

        :param aio_id: identifier tying this instance's subcomponents
            together; a random UUID is generated when none is supplied.
        '''
        if aio_id is None:
            aio_id = str(uuid.uuid4())


        card_body = dbc.CardBody([
            dbc.Label('Source'),
            # Which document source to use; 'latest' is the default and
            # needs no extra arguments.
            dbc.RadioItems(
                id=self.ids.source_selector(aio_id),
                options={'latest': 'Latest Document',
                         'assignment': 'Assignment',
                         'timestamp': 'Specific Time'},
                inline=True,
                value='latest'),
            html.Div('Additional Arguments'),
            # Assignment picker: options are filled in by the fetch
            # callback below; the wrapper's visibility tracks the source.
            html.Div([
                dbc.RadioItems(id=self.ids.assignment_input(aio_id)),
            ], id=self.ids.assignment_wrapper(aio_id)),
            # Date + time inputs for the 'timestamp' source, defaulting
            # to "now" at construction time.
            html.Div([
                dbc.InputGroup([
                    dcc.DatePickerSingle(
                        id=self.ids.date_input(aio_id),
                        date=datetime.date.today()),
                    dbc.Input(
                        type='time',
                        id=self.ids.timestamp_input(aio_id),
                        value=datetime.datetime.now().strftime("%H:%M"))
                ])
            ], id=self.ids.datetime_wrapper(aio_id)),
            dbc.Button('Apply', id=self.ids.apply(aio_id), class_name='mt-1', n_clicks=0),
            # Output store other callbacks listen to; starts as 'latest'.
            dcc.Store(id=self.ids.kwargs_store(aio_id), data={'src': 'latest'})
        ])
        component = [
            dbc.CardHeader('Document Source'),
            card_body
        ]
        super().__init__(component)

    # Update data: on Apply, serialize the selected source plus its
    # arguments into the kwargs_store. Timestamps are converted to an
    # epoch-milliseconds string on the client.
    clientside_callback(
        '''function (clicks, src, assignment, date, time) {
            if (clicks === 0) { return window.dash_clientside.no_update; }
            let kwargs = {};
            if (src === 'assignment') {
                kwargs.assignment = assignment;
            } else if (src === 'timestamp') {
                kwargs.requested_timestamp = new Date(`${date}T${time}`).getTime().toString()
            }
            return {src, kwargs};
        }
        ''',
        Output(ids.kwargs_store(MATCH), 'data'),
        Input(ids.apply(MATCH), 'n_clicks'),
        State(ids.source_selector(MATCH), 'value'),
        State(ids.assignment_input(MATCH), 'value'),
        State(ids.date_input(MATCH), 'date'),
        State(ids.timestamp_input(MATCH), 'value'),
    )

    # Show/hide the argument inputs to match the selected source
    # ('d-none' is Bootstrap's display-none utility class).
    clientside_callback(
        '''function (src) {
            if (src === 'assignment') {
                return ['d-none', ''];
            } else if (src === 'timestamp') {
                return ['', 'd-none']
            }
            return ['d-none', 'd-none'];
        }
        ''',
        Output(ids.datetime_wrapper(MATCH), 'className'),
        Output(ids.assignment_wrapper(MATCH), 'className'),
        Input(ids.source_selector(MATCH), 'value'),
    )

    # Populate assignment options from the server's course_work endpoint,
    # using the course_id encoded in the URL hash.
    # NOTE(review): relies on a global JS helper `decode_string_dict` and
    # expects the endpoint to return the raw Google shape
    # ({courseWork: [{title, id}, ...]}) — confirm against the server route,
    # since a cleaned endpoint returning a bare list would break this.
    clientside_callback(
        '''async function (id, hash) {
            if (hash.length === 0) { return window.dash_clientside.no_update; }
            const decoded = decode_string_dict(hash.slice(1));
            if (!decoded.course_id) { return window.dash_clientside.no_update; }
            const response = await fetch(`${window.location.protocol}//${window.location.hostname}:${window.location.port}/google/course_work/${decoded.course_id}`);
            const data = await response.json();
            const options = data.courseWork.map(function (item) {
                return { label: item.title, value: item.id };
            });
            return options;
        }
        ''',
        Output(ids.assignment_input(MATCH), 'options'),
        Input(ids.source_selector(MATCH), 'id'),
        Input('_pages_location', 'hash'),
    )

    # Disable Apply when the current selection is already applied (or,
    # for 'assignment', when no assignment has been picked yet).
    # NOTE(review): the JS uses bitwise `&`/`|` on booleans — the results
    # are truthy/falsy 0/1 so this works, but `&&`/`||` would be the
    # conventional operators here.
    clientside_callback(
        '''function (src, assignment, date, time, current) {
            if (src === 'assignment' & (assignment === undefined | current.kwargs?.assignment === assignment)) {
                return true;
            }
            if (src === 'timestamp' & current.kwargs?.requested_timestamp === new Date(`${date}T${time}`).getTime().toString()) {
                return true;
            }
            if (src === 'latest' & current.src === 'latest') { return true; }
            return false;
        }
        ''',
        Output(ids.apply(MATCH), 'disabled'),
        Input(ids.source_selector(MATCH), 'value'),
        Input(ids.assignment_input(MATCH), 'value'),
        Input(ids.date_input(MATCH), 'date'),
        Input(ids.timestamp_input(MATCH), 'value'),
        Input(ids.kwargs_store(MATCH), 'data'),
    )
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@

from .LOConnectionStatusAIO import LOConnectionStatusAIO
from .LOConnectionAIO import LOConnectionAIO
from .LODocumentSourceSelectorAIO import LODocumentSourceSelectorAIO
from .ProfileSidebarAIO import ProfileSidebarAIO

if not hasattr(_dash, '__plotly_dash') and not hasattr(_dash, 'development'):
Expand Down
2 changes: 1 addition & 1 deletion modules/lo_dash_react_components/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
"watch-css": "sass src/lib:lo_dash_react_components/css --watch",
"start-all": "npm-run-all --parallel watch-css react-start webpack-start dash-start",
"clean-build:python": "rm -rf dist/ && rm -rf build/",
"build:python": "npm run clean-build:python && python setup.py sdist bdist_wheel"
"build:python": "npm run clean-build:python && npm run build && python setup.py sdist bdist_wheel"
},
"author": "Piotr Mitros <pmitros@ets.org>",
"license": "AGPL-3.0",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ export default class LOPanelLayout extends Component {
{leftPanels.map(panel =>
<div
key={panel.id}
className={shown.includes(panel.id) ? 'side-panel open' : 'side-panel closed'}
className={`${panel.className} ${shown.includes(panel.id) ? 'side-panel open' : 'side-panel closed'}`}
style={{ width: shown.includes(panel.id) ? panel.width : 0 }}
>
{panel.children}
Expand All @@ -36,7 +36,7 @@ export default class LOPanelLayout extends Component {
{rightPanels.map(panel =>
<div
key={panel.id}
className={shown.includes(panel.id) ? 'side-panel open' : 'side-panel closed'}
className={`${panel.className} ${shown.includes(panel.id) ? 'side-panel open' : 'side-panel closed'}`}
style={{ width: shown.includes(panel.id) ? panel.width : 0 }}
>
{panel.children}
Expand Down Expand Up @@ -77,6 +77,7 @@ LOPanelLayout.propTypes = {
width: PropTypes.string,
offset: PropTypes.number,
side: PropTypes.string,
className: PropTypes.string,
id: PropTypes.string.isRequired
})),

Expand Down
1 change: 1 addition & 0 deletions modules/wo_bulk_essay_analysis/VERSION
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
0.1.0+2025.04.01T15.05.13.407Z.44725993.berickson.022025.gpt.dashboard.updates
3 changes: 3 additions & 0 deletions modules/wo_bulk_essay_analysis/pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
5 changes: 4 additions & 1 deletion modules/wo_bulk_essay_analysis/setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,15 @@
name = Writing Observer Automated Essay Feedback
description = Dashboard for interfacing a classroom of essays with automated feedback
url = https://github.com/ETS-Next-Gen/writing_observer
version = 0.1
version = file:VERSION

[options]
packages = find:
include_package_data = true

[options.package_data]
wo_bulk_essay_analysis = assets/*

[options.entry_points]
lo_modules =
wo_bulk_essay_analysis = wo_bulk_essay_analysis.module
13 changes: 0 additions & 13 deletions modules/wo_bulk_essay_analysis/setup.py

This file was deleted.

Loading
Loading