Skip to content

Commit 1fbd63a

Browse files
authored
V2.2.9 fix assignment 3 sched (#81)
* WIP midterm post
* ADD midterm retro post
* ADD new rpc services to mindebug
* FIX mobile content shift
* FIX header top padding
* FIX add onclick wrapper to non public nav items
* ADD docs to some utils functions
* ADD docs to more utils functions and views
* CHG reorganize minikube debug
* FIX cache missing issue
* FIX weird reaper crashing issue
* ADD timestamp to usage plot
* ADD regrade button to admin panel
* ADD regrade queue
* ADD rpc-regrade to restart script
* CHG add check to reaper for user Nonetype
1 parent ca70de5 commit 1fbd63a

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

60 files changed

+1703
-843
lines changed

Makefile

+21-3
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
PERSISTENT_SERVICES := db traefik kibana elasticsearch-coordinating redis-master logstash adminer
2-
RESTART_ALWAYS_SERVICES := api web-dev rpc-default rpc-theia
2+
RESTART_ALWAYS_SERVICES := api web-dev rpc-default rpc-theia rpc-regrade
33
PUSH_SERVICES := api web logstash theia-init theia-proxy theia-admin theia-xv6
44

55

@@ -55,20 +55,38 @@ debug:
5555
sleep 3
5656
@echo 'running migrations'
5757
make -C api migrations
58+
@echo ''
5859
@echo 'seed: http://localhost/api/admin/seed/'
5960
@echo 'auth: http://localhost/api/admin/auth/token/jmc1283'
6061
@echo 'site: http://localhost/'
6162

6263
.PHONY: mindebug # Start the minimal cluster in debug mode
63-
mindebug: build
64+
mindebug:
6465
docker-compose up -d traefik db redis-master logstash
6566
docker-compose up \
6667
-d --force-recreate \
67-
api web rpc-worker
68+
api web rpc-default rpc-theia
6869
@echo 'Waiting a moment before running migrations'
6970
sleep 3
7071
@echo 'running migrations'
7172
make -C api migrations
73+
@echo ''
74+
@echo 'seed: http://localhost/api/admin/seed/'
75+
@echo 'auth: http://localhost/api/admin/auth/token/jmc1283'
76+
@echo 'site: http://localhost/'
77+
78+
.PHONY: mkdebug # Start minikube debug
79+
mkdebug:
80+
./kube/debug/provision.sh
81+
@echo ''
82+
@echo 'seed: http://localhost/api/admin/seed/'
83+
@echo 'auth: http://localhost/api/admin/auth/token/jmc1283'
84+
@echo 'site: http://localhost/'
85+
86+
.PHONY: mkrestart # Restart minikube debug
87+
mkrestart:
88+
./kube/debug/restart.sh
89+
@echo ''
7290
@echo 'seed: http://localhost/api/admin/seed/'
7391
@echo 'auth: http://localhost/api/admin/auth/token/jmc1283'
7492
@echo 'site: http://localhost/'

api/anubis/rpc/pipeline.py

+3-4
Original file line numberDiff line numberDiff line change
@@ -100,15 +100,15 @@ def cleanup_jobs(batch_v1) -> int:
100100
return active_count
101101

102102

103-
def test_repo(submission_id: str):
103+
def create_submission_pipeline(submission_id: str):
104104
"""
105105
This function should launch the appropriate testing container
106106
for the assignment, passing along the function arguments.
107107
108108
:param submission_id: submission.id of to test
109109
"""
110110
from anubis.app import create_app
111-
from anubis.utils.rpc import enqueue_webhook
111+
from anubis.utils.rpc import enqueue_autograde_pipeline
112112

113113
app = create_app()
114114

@@ -153,8 +153,7 @@ def test_repo(submission_id: str):
153153
"TOO many jobs - re-enqueue {}".format(submission_id),
154154
extra={"submission_id": submission_id},
155155
)
156-
enqueue_webhook(submission_id)
157-
time.sleep(1)
156+
enqueue_autograde_pipeline(submission_id)
158157
exit(0)
159158

160159
# Create job object

api/anubis/rpc/seed.py

+2
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
TheiaSession,
1717
AssignmentQuestion,
1818
AssignedStudentQuestion,
19+
AssignedQuestionResponse,
1920
)
2021
from anubis.utils.data import rand
2122
from anubis.utils.questions import assign_questions
@@ -149,6 +150,7 @@ def create_course(users):
149150
def seed_main():
150151
# Yeet
151152
TheiaSession.query.delete()
153+
AssignedQuestionResponse.query.delete()
152154
AssignedStudentQuestion.query.delete()
153155
AssignmentQuestion.query.delete()
154156
SubmissionTestResult.query.delete()

api/anubis/utils/assignments.py

+35-13
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import traceback
22
from datetime import datetime
3-
from typing import Union, List, Dict
3+
from typing import Union, List, Dict, Tuple
44

55
from dateutil.parser import parse as date_parse, ParserError
66
from sqlalchemy import or_, and_
@@ -64,11 +64,11 @@ def get_assignments(netid: str, course_id=None) -> Union[List[Dict[str, str]], N
6464
filters.append(Assignment.release_date <= datetime.now())
6565
filters.append(Assignment.hidden == False)
6666

67-
assignments = Assignment.query\
67+
assignments = Assignment.query \
6868
.join(Course).join(InCourse).join(User).filter(
69-
User.netid == netid,
70-
*filters
71-
).order_by(Assignment.due_date.desc()).all()
69+
User.netid == netid,
70+
*filters
71+
).order_by(Assignment.due_date.desc()).all()
7272

7373
a = [a.data for a in assignments]
7474
for assignment_data in a:
@@ -135,7 +135,17 @@ def get_submissions(
135135
return [s.full_data for s in submissions]
136136

137137

138-
def assignment_sync(assignment_data):
138+
def assignment_sync(assignment_data: dict) -> Tuple[Union[dict, str], bool]:
139+
"""
140+
Take an assignment_data dictionary from a assignment meta.yaml
141+
and update any and all existing data about the assignment.
142+
143+
* This includes the assignment object fields, assignment tests,
144+
and assignment questions. *
145+
146+
:param assignment_data:
147+
:return:
148+
"""
139149
assignment = Assignment.query.filter(
140150
Assignment.unique_code == assignment_data["unique_code"]
141151
).first()
@@ -170,24 +180,31 @@ def assignment_sync(assignment_data):
170180
return "Unable to parse datetime", 406
171181

172182
db.session.add(assignment)
173-
db.session.commit()
174183

184+
# Go through assignment tests, and delete those that are now
185+
# not in the assignment data.
175186
for assignment_test in AssignmentTest.query.filter(
176187
and_(
177188
AssignmentTest.assignment_id == assignment.id,
178189
AssignmentTest.name.notin_(assignment_data["tests"]),
179190
)
180191
).all():
192+
# Delete any and all submission test results that are still outstanding
193+
# for an assignment test that will be deleted.
181194
SubmissionTestResult.query.filter(
182195
SubmissionTestResult.assignment_test_id == assignment_test.id,
183196
).delete()
197+
198+
# Delete the assignment test
184199
AssignmentTest.query.filter(
185200
AssignmentTest.assignment_id == assignment.id,
186201
AssignmentTest.name == assignment_test.name,
187202
).delete()
188-
db.session.commit()
189203

204+
# Run though the tests in the assignment data
190205
for test_name in assignment_data["tests"]:
206+
207+
# Find if the assignment test exists
191208
assignment_test = (
192209
AssignmentTest.query.filter(
193210
Assignment.id == assignment.id,
@@ -197,14 +214,19 @@ def assignment_sync(assignment_data):
197214
.first()
198215
)
199216

217+
# Create the assignment test if it did not already exist
200218
if assignment_test is None:
201219
assignment_test = AssignmentTest(assignment=assignment, name=test_name)
202220
db.session.add(assignment_test)
203-
db.session.commit()
204221

205-
accepted, ignored, rejected = ingest_questions(
206-
assignment_data["questions"], assignment
207-
)
208-
question_message = {"accepted": accepted, "ignored": ignored, "rejected": rejected}
222+
# Sync the questions in the assignment data
223+
question_message = None
224+
if 'questions' in assignment_data:
225+
accepted, ignored, rejected = ingest_questions(
226+
assignment_data["questions"], assignment
227+
)
228+
question_message = {"accepted": accepted, "ignored": ignored, "rejected": rejected}
229+
230+
db.session.commit()
209231

210232
return {"assignment": assignment.data, "questions": question_message}, True

api/anubis/utils/cache.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,6 @@
33
cache = Cache(config={"CACHE_TYPE": "redis"})
44

55

6-
@cache.cached(timeout=1)
6+
@cache.memoize(timeout=1)
77
def cache_health():
8-
pass
8+
return None

api/anubis/utils/data.py

+33-7
Original file line numberDiff line numberDiff line change
@@ -218,15 +218,32 @@ def split_chunks(lst, n):
218218
return _chunks
219219

220220

221-
def rand(max_len=None):
221+
def rand(max_len: int = None):
222+
"""
223+
Get a relatively random hex string of up
224+
to max_len.
225+
226+
:param max_len:
227+
:return:
228+
"""
222229
rand_hash = sha256(urandom(32)).hexdigest()
223230
if max_len is not None:
224231
return rand_hash[:max_len]
225232
return rand_hash
226233

227234

228235
def human_readable_to_bytes(size: str) -> int:
229-
size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
236+
"""
237+
Convert a string in the form of 5GB and get an integer value
238+
for the number of bytes in that data size.
239+
240+
>>> human_readable_to_bytes('1 GiB')
241+
>>> 1073741824
242+
243+
:param size:
244+
:return:
245+
"""
246+
size_name = ("B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB")
230247
size = size.split() # divide '1 GB' into ['1', 'GB']
231248
num, unit = int(size[0]), size[1]
232249
# index in list of sizes determines power to raise it to
@@ -236,16 +253,25 @@ def human_readable_to_bytes(size: str) -> int:
236253
return num * factor
237254

238255

239-
def row2dict(row):
240-
d = {}
256+
def row2dict(row) -> dict:
257+
"""
258+
Convert an sqlalchemy object to a dictionary from its column
259+
values. This function looks at internal sqlalchemy fields
260+
to create a raw dictionary from the columns in the table.
261+
262+
:param row:
263+
:return:
264+
"""
265+
266+
raw = {}
241267

242268
for column in row.__table__.columns:
243269
value = getattr(row, column.name)
244270

245271
if isinstance(value, datetime):
246-
d[column.name] = str(value)
272+
raw[column.name] = str(value)
247273
continue
248274

249-
d[column.name] = value
275+
raw[column.name] = value
250276

251-
return d
277+
return raw

api/anubis/utils/rpc.py

+10-10
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
from rq import Queue
33

44
from anubis.config import config
5-
from anubis.rpc.pipeline import test_repo
5+
from anubis.rpc.pipeline import create_submission_pipeline
66
from anubis.rpc.seed import seed_debug
77
from anubis.rpc.theia import (
88
initialize_theia_session,
@@ -29,20 +29,18 @@ def rpc_enqueue(func, queue=None, args=None):
2929
conn.close()
3030

3131

32-
def enqueue_webhook(*args):
32+
def enqueue_autograde_pipeline(*args, queue: str = 'default'):
3333
"""Enqueues a test job"""
34-
rpc_enqueue(test_repo, args=args)
34+
rpc_enqueue(create_submission_pipeline, queue=queue, args=args)
3535

3636

3737
def enqueue_ide_initialize(*args):
3838
"""Enqueue an ide initialization job"""
39-
40-
rpc_enqueue(initialize_theia_session, 'theia', args=args)
39+
rpc_enqueue(initialize_theia_session, queue='theia', args=args)
4140

4241

4342
def enqueue_ide_stop(*args):
4443
"""Reap theia session kube resources"""
45-
4644
rpc_enqueue(reap_theia_session, queue='theia', args=args)
4745

4846

@@ -51,9 +49,11 @@ def enqueue_ide_reap_stale(*args):
5149
rpc_enqueue(reap_stale_theia_sessions, queue='theia', args=args)
5250

5351

54-
def seed():
55-
rpc_enqueue(seed_debug)
52+
def enqueue_seed():
53+
"""Enqueue debug seed data"""
54+
rpc_enqueue(seed_debug, queue='default')
5655

5756

58-
def create_visuals(*_):
59-
rpc_enqueue(create_visuals_)
57+
def enqueue_create_visuals(*_):
58+
"""Enqueue create visuals"""
59+
rpc_enqueue(create_visuals_, queue='default')

0 commit comments

Comments (0)