@@ -3,6 +3,7 @@ import React from "react";
 import "../../main.css";
 import EssayContainer from "../../components/posts/CollegeAdmissions/essay/EssayContainer";
 import WomenEnrollmentContainer from "../../components/posts/CollegeAdmissions/enrollmentGraph/WomenEnrollmentContainer.js";
+import { MatchingGame } from "./MatchingGame.js";
 
 export default [
   {
@@ -15,19 +16,21 @@ export default [
         {
           body: (
             <p>
-              In 2013, the CS department at the University of Texas at Austin implemented the GRADE (GRaduate ADmissions Evaluator)
-              machine learning system into its Ph.D. admissions process.
+              In 2013, the CS department at the University of Texas at Austin
+              implemented the GRADE (GRaduate ADmissions Evaluator) machine
+              learning system into its Ph.D. admissions process.
             </p>
           ),
         },
         {
           body: (
             <p>
-              Made to reflect the admissions committee’s decisions prior to its implementation in 2013,
-              issues arose of it{" "}
+              Because GRADE was built to reflect the admissions committee’s
+              decisions prior to its implementation in 2013, concerns arose
+              that it was{" "}
               <mark className="underline">
-                compounding the initial biases that the admissions committee held,
-                which disadvantaged applicants from underrepresented groups.
+                compounding the initial biases that the admissions committee
+                held, which disadvantaged applicants from underrepresented
+                groups.
               </mark>{" "}
               The department ultimately abandoned GRADE in 2020.
             </p>
@@ -37,16 +40,17 @@ export default [
           body: (
             <p>
               <mark className="bold">
-                In this case study, we will analyze why and how ML models such as GRADE that screen various applications
-                can undervalue the achievements of underrepresented groups.
+                In this case study, we will analyze why and how ML models such
+                as GRADE that screen various applications can undervalue the
+                achievements of underrepresented groups.
               </mark>
             </p>
           ),
         },
       ],
     },
   },
-
+
   {
     post: {
       profilePic: teachLogo,
@@ -167,7 +171,39 @@ export default [
       ],
     },
   },
-
+  {
+    post: {
+      profilePic: teachLogo,
+      profilePicName: "Profile Picture - Thinking Like an ML Model",
+      header: "Thinking Like an ML Model",
+      headerLink: true,
+      linkTo: "facebook",
+      subheader:
+        "Why did the model take factors unrelated to competence into account?",
+      bodyText: [
+        {
+          body: (
+            <p className="bold">
+              The computer isn't really looking for gender. So why does it learn
+              to take that into account without being told to do so?
+              <br />
+              <br />
+              Here's an interactive example that emulates how machine learning
+              models "learn".
+              <br />
+              <br />
+              Grouping exercise!
+              <br />
+              Explain why you make mistakes.
+              <br />
+              Explain how that's similar to how ML models make mistakes.
+              <MatchingGame />
+            </p>
+          ),
+        },
+      ],
+    },
+  },
   {
     post: {
       profilePic: teachLogo,
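The post added above claims a model can come to penalize gender "without being told to do so." As a minimal sketch of that dynamic, assuming nothing about the repo's actual MatchingGame component: a toy logistic regression whose second feature is a hypothetical proxy for gender, trained on labels that encode a past committee's bias. All data and feature names below are invented for illustration.

// Hypothetical toy data: x = [yearsOfResearch, attendedWomensCollege],
// y = past committee decision (1 = admit). The labels encode the old
// committee's bias, so the second feature (a gender proxy, not a
// measure of competence) happens to predict rejection.
const data = [
  { x: [4, 0], y: 1 },
  { x: [3, 0], y: 1 },
  { x: [4, 1], y: 0 },
  { x: [3, 1], y: 0 },
];

const sigmoid = (z) => 1 / (1 + Math.exp(-z));

// Plain logistic regression trained by gradient descent. The model never
// sees "gender"; it only sees numbers that reduce error on the labels.
let w = [0, 0];
let b = 0;
const lr = 0.1;
for (let step = 0; step < 5000; step++) {
  for (const { x, y } of data) {
    const p = sigmoid(w[0] * x[0] + w[1] * x[1] + b);
    const err = p - y; // gradient of log-loss w.r.t. the pre-sigmoid score
    w[0] -= lr * err * x[0];
    w[1] -= lr * err * x[1];
    b -= lr * err;
  }
}

// w[1] ends up strongly negative: the proxy feature is penalized because
// it is what best explains the biased historical decisions.
console.log(w.map((v) => v.toFixed(2)), b.toFixed(2));

Because the proxy is the feature that best separates the biased labels, gradient descent drives its weight negative; the grouping exercise in the post aims to surface the same effect in human learners.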