diff --git a/.github/workflows/frontend_ci.yml b/.github/workflows/frontend_ci.yml new file mode 100644 index 0000000..f1556c3 --- /dev/null +++ b/.github/workflows/frontend_ci.yml @@ -0,0 +1,46 @@ +name: Frontend CI + +on: [pull_request] + +defaults: + run: + working-directory: ./frontend + +jobs: + link-check: + name: Run ESLint check + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Install dependencies + run: yarn install + + - name: Run dependencies + run: yarn run lint + + format-check: + name: Run Prettier check + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Install dependencies + run: yarn install + + - name: Run dependencies + run: yarn run format-check \ No newline at end of file diff --git a/README.md b/README.md index 65ddbb9..ddf9a74 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ This repository contains the code for the front-end website for the NIH-funded B ## Prerequisities -- Node.js +- [Node.js](https://nodejs.org/en) v20 - Yarn ## Install diff --git a/frontend/.vscode/extensions.json b/frontend/.vscode/extensions.json index fe38802..b6a2eb9 100644 --- a/frontend/.vscode/extensions.json +++ b/frontend/.vscode/extensions.json @@ -12,4 +12,4 @@ "dbaeumer.jshint", "ms-vscode.vscode-typescript-tslint-plugin" ] -} \ No newline at end of file +} diff --git a/frontend/.vscode/settings.json b/frontend/.vscode/settings.json index 746cf57..fa9e6d6 100644 --- a/frontend/.vscode/settings.json +++ b/frontend/.vscode/settings.json @@ -3,14 +3,7 @@ "editor.guides.bracketPairs": true, "editor.formatOnSave": true, "editor.defaultFormatter": "esbenp.prettier-vscode", - "editor.codeActionsOnSave": [ - "source.fixAll.eslint" - ], - "eslint.validate": [ - "javascript", - "javascriptreact", - 
"typescript", - "vue" - ], + "editor.codeActionsOnSave": ["source.fixAll.eslint"], + "eslint.validate": ["javascript", "javascriptreact", "typescript", "vue"], "typescript.tsdk": "node_modules/typescript/lib" -} \ No newline at end of file +} diff --git a/frontend/README.md b/frontend/README.md index 305590e..0a98417 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -3,6 +3,7 @@ A Quasar Project ## Install the dependencies + ```bash yarn # or @@ -10,32 +11,33 @@ npm install ``` ### Start the app in development mode (hot-code reloading, error reporting, etc.) + ```bash quasar dev ``` - ### Lint the files + ```bash yarn lint # or npm run lint ``` - ### Format the files + ```bash yarn format # or npm run format ``` - - ### Build the app for production + ```bash quasar build ``` ### Customize the configuration + See [Configuring quasar.config.js](https://v2.quasar.dev/quasar-cli-vite/quasar-config-js). diff --git a/frontend/index.html b/frontend/index.html index 9aaa7af..07be257 100644 --- a/frontend/index.html +++ b/frontend/index.html @@ -3,23 +3,28 @@ <%= productName %> - - - - - + + + + + - - - - - + + + + + diff --git a/frontend/src/constants/about.ts b/frontend/src/constants/about.ts index e1a6d4d..acc60a7 100644 --- a/frontend/src/constants/about.ts +++ b/frontend/src/constants/about.ts @@ -1,3 +1,8 @@ +/** + * about.ts + * + * Contains organizations, team members, and image paths to populate the About page. + */ import { OrganizationTeam } from 'src/models/about'; export const team: OrganizationTeam[] = [ @@ -43,7 +48,7 @@ export const team: OrganizationTeam[] = [ { name: 'Lauren Diaz', image: 'team/lauren_diaz.jpg', - } + }, ], }, { diff --git a/frontend/src/constants/links.ts b/frontend/src/constants/links.ts index 8f5dc0f..1feebbf 100644 --- a/frontend/src/constants/links.ts +++ b/frontend/src/constants/links.ts @@ -1,3 +1,8 @@ +/** + * links.ts + * + * Contains commonly re-used links throughout the website. 
+ */ import { Link } from 'src/models/common'; export const links: Record = { @@ -32,6 +37,6 @@ export const links: Record = { open_data: { text: 'Open Data', uri: 'https://aws.amazon.com/opendata', - // uri: 'https://registry.opendata.aws/ember', TODO: When site is published + // uri: 'https://registry.opendata.aws/ember', TODO: When/If site is published }, }; diff --git a/frontend/src/constants/projects.ts b/frontend/src/constants/projects.ts index 6a25d85..19baf41 100644 --- a/frontend/src/constants/projects.ts +++ b/frontend/src/constants/projects.ts @@ -1,3 +1,9 @@ +/** + * projects.ts + * + * Contains the list of BBQS projects and metadata for each project. + * TODO: Convert this file of data into a real database. + */ import { ContributorRole, ProjectMetadata } from 'src/models/projects'; // Semi auto-generated from fetchProjectMetadata.ts script diff --git a/frontend/src/css/app.scss b/frontend/src/css/app.scss index 31fae84..db68d2e 100644 --- a/frontend/src/css/app.scss +++ b/frontend/src/css/app.scss @@ -8,7 +8,7 @@ a.link { } body { - font-size: 1em; + font-size: 1em; } .text-decoration-none { @@ -57,7 +57,6 @@ body { } } - /* Small devices (portrait tablets and large phones, 600px and up) */ @media (min-width: 600px) and (max-width: 768px) { .text-h1 { @@ -77,11 +76,10 @@ body { } } - /** * Scrollable Component * - top margin ensures the component is below the toolbar on scroll */ - .scrollable { +.scrollable { scroll-margin-top: 80px; } diff --git a/frontend/src/css/quasar.variables.scss b/frontend/src/css/quasar.variables.scss index 931d0be..a7a73bb 100644 --- a/frontend/src/css/quasar.variables.scss +++ b/frontend/src/css/quasar.variables.scss @@ -12,14 +12,14 @@ // to match your app's branding. // Tip: Use the "Theme Builder" on Quasar's documentation website. 
-$primary : #950f1a; -$secondary : #f18029; -$accent : #f5e031; +$primary: #950f1a; +$secondary: #f18029; +$accent: #f5e031; -$dark : #1D1D1D; -$dark-page : #121212; +$dark: #1d1d1d; +$dark-page: #121212; -$positive : #21BA45; -$negative : #C10015; -$info : #31CCEC; -$warning : #F2C037; +$positive: #21ba45; +$negative: #c10015; +$info: #31ccec; +$warning: #f2c037; diff --git a/frontend/src/layouts/MainLayout.vue b/frontend/src/layouts/MainLayout.vue index 9f87390..acc184c 100644 --- a/frontend/src/layouts/MainLayout.vue +++ b/frontend/src/layouts/MainLayout.vue @@ -54,9 +54,9 @@
- Privacy Policy | - Terms of Service | - APL Home + Privacy Policy | + Terms of Service | + APL Home
© 2024 JHU/APL. All rights reserved.
diff --git a/frontend/src/models/projects.ts b/frontend/src/models/projects.ts index 707352a..ff8c438 100644 --- a/frontend/src/models/projects.ts +++ b/frontend/src/models/projects.ts @@ -69,23 +69,25 @@ export const ProjectTableColumns: QTableColumn[] = [ name: 'principalInvestigator', label: 'Principal Investigator(s)', align: 'left', - field: (row) => row.contributors - .sort((a: Contributor, b: Contributor) => { - if (a.roles.includes(ContributorRole.contactPrincipalInvestigator)) return -1; - if (b.roles.includes(ContributorRole.contactPrincipalInvestigator)) return 1; + field: (row) => + row.contributors.sort((a: Contributor, b: Contributor) => { + if (a.roles.includes(ContributorRole.contactPrincipalInvestigator)) return -1; + if (b.roles.includes(ContributorRole.contactPrincipalInvestigator)) return 1; - return 0; - }) - , + return 0; + }), required: true, sortable: true, - format: (val) => val.map((c: Contributor) => c.name).join(', ') + format: (val) => val.map((c: Contributor) => c.name).join(', '), }, { name: 'contactPrincipalInvestigator', label: 'Contact PI', align: 'left', - field: (row) => row.contributors.filter((c: Contributor) => c.roles.includes(ContributorRole.contactPrincipalInvestigator)), + field: (row) => + row.contributors.filter((c: Contributor) => + c.roles.includes(ContributorRole.contactPrincipalInvestigator) + ), required: true, sortable: true, }, diff --git a/frontend/src/pages/AboutPage.vue b/frontend/src/pages/AboutPage.vue index 7528558..409fa17 100644 --- a/frontend/src/pages/AboutPage.vue +++ b/frontend/src/pages/AboutPage.vue @@ -10,9 +10,8 @@

In September 2024, our team at the - , University of California, Los Angeles, University - of Pennsylvania, Dartmouth College, and Massachusetts Institute of Technology were - funded by the NIH Grant + , University of California, Los Angeles, University of Pennsylvania, Dartmouth College, + and Massachusetts Institute of Technology were funded by the NIH Grant to build the data archive for hosting the data generated through the NIH 's . @@ -37,7 +36,10 @@ ; IARPA ; DARPA - + ; NIH

Uploading to and Downloading Data from the EMBER-DANDI Archive
-

These instructions describe how to upload to and download data from the EMBER-DANDI Archive. The instructions are divided into two main sections: one set of instructions for novice users, and one set for experienced users of DANDI. Navigate to a section depending on your experience with each tool.

- +

+ These instructions describe how to upload to and download data from the EMBER-DANDI + Archive. The instructions are divided into two main sections: one set of instructions + for novice users, and one set for experienced users of DANDI. Navigate to a section + depending on your experience with each tool. +

+
I'm new to Python, CLI, and/or DANDI
-

If you're new to Python, follow these instructions to get Python and a virtual Anaconda environment installed:

+

+ If you're new to Python, follow these instructions to get Python and a virtual Anaconda + environment installed: +

    -
  1. - to get Python and standard data science/machine learning packages, and a desktop application
  2. +
  3. + to get + Python and standard data science/machine learning packages, and a desktop application +
  4. After installing, launch the Anaconda Navigator
  5. Navigate to the "Environments" tab on the left pane
  6. Create a new environment with your desired name
  7. - Anaconda Navigator + Anaconda Navigator -
  8. Click on the newly created environment, press the "Play" icon and select "Open Terminal"
  9. +
  10. + Click on the newly created environment, press the "Play" icon and select "Open + Terminal" +
How do I upload data?
    -
  1. Create a GitHub account - - (ideally with an .edu account for quicker approval)
  2. -
  3. Register with - - using the "Log In With GitHub" button in the top right corner
  4. -
  5. Wait for confirmation of review. In the meantime, convert your data to approved standards
  6. +
  7. + Create a GitHub account + + (ideally with an .edu account for quicker approval) +
  8. +
  9. + Register with + + using the "Log In With GitHub" button in the top right corner +
  10. +
  11. + Wait for confirmation of review. In the meantime, convert your data to approved + standards +
  12. Upon approval, log in with the "Log In With GitHub" button
Convert files to appropriate standards
  1. - - , which converts common neuroscience data formats into Neurodata Without Borders standards
  2. -
  3. Complete key tutorials for NWB GUIDE: + + , which converts common neuroscience data formats into Neurodata Without Borders + standards +
  4. +
  5. + Complete key tutorials for NWB GUIDE:
    • - +
    • -
    • Convert - -
    • -
    • Convert - +
    • + Convert + +
    • +
    • + Convert +
  6. -
  7. Repeat with your own your data +
  8. Repeat with your own data
  9. +
  10. + Ensure your data is supported by checking the +
  11. -
  12. Ensure your data is supported by checking the - -
  13. - Ecosystem
Create an EMBER-DANDIset
@@ -81,17 +112,33 @@
  • Log in to EMBER-DANDI with your approved GitHub account
  • Select the "New Dandiset" button in the top right corner
  • Fill out basic metadata and hit "Register Dandiset"
  • - New Dandiset + New Dandiset
    Upload the converted files
      -
    1. In the python terminal in your ember-dandi specific environment, run: `pip install -U dandi`
    2. -
    3. Validate the converted files, replacing `source_folder` with the path to your .nwb files: `dandi validate --ignore DANDI.NO_DANDISET_FOUND "source_folder"`
    4. +
    5. + In the python terminal in your ember-dandi specific environment, run: `pip install -U + dandi` +
    6. +
    7. + Validate the converted files, replacing `source_folder` with the path to your .nwb + files: `dandi validate --ignore DANDI.NO_DANDISET_FOUND "source_folder"` +
    8. Navigate to your dataset in the EMBER-DANDI archive and note the ID number
    9. - Test Number Data -
    10. Upload your validated .nwb files using the following commands, replacing `<dataset_id>` and `source_folder` with your specific information: -
      +            Test Number Data
      +            
    11. + Upload your validated .nwb files using the following commands, replacing + `<dataset_id>` and `source_folder` with your specific information: +
       dandi download https://dandi.emberarchive.org/dandiset/<dataset_id>/draft
       cd <dataset_id>
       dandi organize "source_folder" -f dry
      @@ -108,32 +155,36 @@ dandi upload -i ember
                   
    12. Click the "Download" tab on the right-hand panel
    13. Copy the command into your python terminal
    - Download Dandiset + Download Dandiset

    To download specific files:

    1. Navigate to the URL of the dataset of interest
    2. Click on the "Files" tab on the right-hand panel
    3. - Files Tab + Files Tab
    4. Navigate to the folder containing the desired file
    5. Click the downward pointing arrow icon to download the file
    I've used DANDI or CLI tools before!

    For experienced users, the steps for uploading data are as follows:

    -
    +          
     dandi download https://dandi.emberarchive.org/dandiset/<dataset_id>/draft
     cd <dataset_id>
     dandi organize "source_folder" -f dry
     dandi organize "source_folder"
     dandi validate .
     dandi upload -i ember
    -
    -

    For downloading data, use the standard procedures from the DANDI documentation - - .

    +
    +

    + For downloading data, use the standard procedures from the DANDI documentation + + . +

    diff --git a/frontend/src/pages/GettingStartedPage.vue b/frontend/src/pages/GettingStartedPage.vue index 12da384..844f252 100644 --- a/frontend/src/pages/GettingStartedPage.vue +++ b/frontend/src/pages/GettingStartedPage.vue @@ -7,13 +7,17 @@

    The EMBER Archive supports free storage and sharing of public data, prioritizing data generated as a part of the - . Users can create an account to begin instantiating a project and uploading data. Users can also request access to data. As the program progresses, project data will be released freely and publicly following appropriate protocols. + . Users can create an account to begin instantiating + a project and uploading data. Users can also request access to data. As the program + progresses, project data will be released freely and publicly following appropriate + protocols.

    Currently, PHI/PII data cannot be submitted to the EMBER Archive. - As the BBQS program progresses, EMBER will be adding functionality to support PHI/PII data storage. + As the BBQS program progresses, EMBER will be adding functionality to support PHI/PII + data storage.

    @@ -27,7 +31,8 @@ Phase I - Users will be required to verify human data have been de-identified and are approved for release and reuse. PHI/PII data will not be accepted. + Users will be required to verify human data have been de-identified and are + approved for release and reuse. PHI/PII data will not be accepted. @@ -36,7 +41,8 @@ Phase II - Data access controls and data use agreements will be implemented, as needed, in accordance with IRB requirements. + Data access controls and data use agreements will be implemented, as needed, in + accordance with IRB requirements. @@ -45,7 +51,8 @@ Phase III - Additional support for the storage and access control for PHI and PII data will be added. + Additional support for the storage and access control for PHI and PII data will be + added. @@ -62,7 +69,8 @@ Metadata - Users can create an EMBER Project, specifying required and optional data for EMBERsets. + Users can create an EMBER Project, specifying required and optional data for + EMBERsets. @@ -71,7 +79,9 @@ Animal Electrophysiology and Behavioral Data - Users can upload animal electrophysiology and behavioral data, specified in NWB and BIDS formats, along with supporting data files. This leverages our EMBER-DANDI instance. + Users can upload animal electrophysiology and behavioral data, specified in NWB + and BIDS formats, along with supporting data files. This leverages our EMBER-DANDI + instance. @@ -80,7 +90,9 @@ De-identified Human Data - Appropriately de-identified human data, such as electrophysiology, can be uploaded by users once assuring the appropriate IRB processes are followed and consent documented. Please contact us for more information! + Appropriately de-identified human data, such as electrophysiology, can be uploaded + by users once assuring the appropriate IRB processes are followed and consent + documented. Please contact us for more information! 
@@ -89,11 +101,11 @@ Future Phases - Future phases will allow upload of human data requiring access controls or data use agreements, as well as secure upload of PHI/PII information. + Future phases will allow upload of human data requiring access controls or data + use agreements, as well as secure upload of PHI/PII information. - @@ -125,7 +137,8 @@
    Data Access & Requests

    - Users can create an account to request data access. As data are released publicly in support of the BBQS program, they will be listed on the + Users can create an account to request data access. As data are released publicly in + support of the BBQS program, they will be listed on the Projects page.

    diff --git a/frontend/src/pages/IndexPage.vue b/frontend/src/pages/IndexPage.vue index 03e91fc..97b1f1a 100644 --- a/frontend/src/pages/IndexPage.vue +++ b/frontend/src/pages/IndexPage.vue @@ -54,8 +54,7 @@

    - For BBQS project teams, - please visit the + For BBQS project teams, please visit the Data Upload section of the Getting Started @@ -68,10 +67,9 @@ >does not yet support storage of Personally Identifiable Information (PII) or Protected Health Information (PHI), but support for this capability is under development. BBQS project teams will be - responsible for ensuring compliance with guidelines from any and all - entities that may govern the use of your data, including the Institutional Review Board - (IRB) of your institution. To learn more about how this may affect your workflow, visit - the + responsible for ensuring compliance with guidelines from any and all entities that may + govern the use of your data, including the Institutional Review Board (IRB) of your + institution. To learn more about how this may affect your workflow, visit the Compliance section of the Getting Started diff --git a/frontend/src/pages/PrivacyPolicy.vue b/frontend/src/pages/PrivacyPolicy.vue index c08747d..d3f3d6f 100644 --- a/frontend/src/pages/PrivacyPolicy.vue +++ b/frontend/src/pages/PrivacyPolicy.vue @@ -6,15 +6,17 @@

    Effective Date: 2/24/2025

    - The Johns Hopkins University Applied Physics Laboratory, LLC (JHU/APL) - ("we", "our", or - "us") respects the privacy of our users. This Privacy Policy - describes how we collect, use, and share information in connection with the EMBER website, + The Johns Hopkins University Applied Physics Laboratory, LLC (JHU/APL) ("we", "our", or "us") respects + the privacy of our users. This Privacy Policy describes how we collect, use, and share + information in connection with the EMBER website, - ("Website"), our social media pages that link to this Privacy - Policy, and all publicly accessible servers, APIs, cloud services, and websites operated - by the EMBER project (collectively, the "Sites"). It also - explains your rights and choices with respect to such information. + ("Website"), our social media pages that link to this + Privacy Policy, and all publicly accessible servers, APIs, cloud services, and websites + operated by the EMBER project (collectively, the "Sites"). + It also explains your rights and choices with respect to such information.

    diff --git a/frontend/src/pages/ProjectsPage.vue b/frontend/src/pages/ProjectsPage.vue index 4465aee..e0a2c9d 100644 --- a/frontend/src/pages/ProjectsPage.vue +++ b/frontend/src/pages/ProjectsPage.vue @@ -9,25 +9,25 @@ , through its ongoing - , has funded {{ projectCounts.total }} research projects, with the earliest project start - date of {{ earliestProjectStart }}. {{ projectCounts.r61r33 }} of the projects are funded - through the + , has funded {{ projectCounts.total }} research projects, with the earliest project + start date of {{ earliestProjectStart }}. {{ projectCounts.r61r33 }} of the projects are + funded through the - intended to support the development and validation of next-generation tools, methods, and - analytic approaches to precisely quantify behaviors and combine them with simultaneous - recordings of brain activity in humans; {{ projectCounts.r34 }} of the projects are funded - through the + intended to support the development and validation of next-generation tools, methods, + and analytic approaches to precisely quantify behaviors and combine them with + simultaneous recordings of brain activity in humans; {{ projectCounts.r34 }} of the + projects are funded through the , intended to support planning and development of the research framework, design, and approach, including activities that will establish feasibility, validity, and/or other - technically qualifying results that, if successful, would support a competitive application - for a U01, R01 or equivalent NIH research award. + technically qualifying results that, if successful, would support a competitive + application for a U01, R01 or equivalent NIH research award.

    A quick overview of the BBQS research projects is provided below.

    @@ -52,7 +52,12 @@