@@ -5,7 +5,6 @@ import PostsList from "../components/mainContent/PostsList";
 import CounterFrame from "../components/posts/FacialRecognition/CounterFrame";
 import { VisibilityProvider } from "../components/mainContent/commonLogic";
 
-
 export default function FacialRecognition() {
   return (
     <VisibilityProvider>
@@ -45,7 +44,7 @@ const FacialRecognitionInfo = [
     subheader: "Understanding Bias Through Categorization",
     bodyText: [
       {
-        body: <CounterFrame />
+        body: <CounterFrame />,
       },
     ],
   },
@@ -215,29 +214,26 @@ const FacialRecognitionInfo = [
       {
         body: (
           <p className="bold">
-            Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
-            eiusmod tempor incididunt ut labore et dolore magna aliqua.
+            The Consequences of Bias in Facial Recognition
           </p>
         ),
       },
       {
         body: (
           <p>
-            Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris
-            nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
-            reprehenderit in voluptate velit esse cillum dolore eu fugiat
-            nulla pariatur.
+            Facial recognition technology has repeatedly been shown to exhibit
+            various biases against certain groups, leading to real-world
+            consequences such as wrongful identification of suspects or
+            decisions that determine access to healthcare. To address these
+            issues, researchers are actively working on debiasing facial
+            recognition models as well as the data sets they rely on. This
+            involves creating more balanced training data and adjusting
+            algorithms so they learn to make accurate predictions.
           </p>
         ),
       },
       {
-        body: (
-          <p>
-            <mark className="bold">{`TODO: `}</mark> Excepteur sint occaecat
-            cupidatat non proident, sunt in culpa qui officia deserunt mollit
-            anim id est laborum.
-          </p>
-        ),
+        body: <p> </p>,
       },
     ],
   },