diff --git a/_data/citations.yaml b/_data/citations.yaml index 8d2f5952..30df1a4f 100644 --- a/_data/citations.yaml +++ b/_data/citations.yaml @@ -17,8 +17,9 @@ date: '2024-02-28' link: https://doi.org/gttf4k orcid: 0000-0001-5290-8342 - plugin: orcid.py - file: orcid.yaml + plugin: sources.py + file: sources.yaml + image: https://www.frontiersin.org/files/Articles/1340383/fdpys-02-1340383-HTML/image_m/fdpys-02-1340383-g003.jpg - id: doi:10.31234/osf.io/ezdbr title: A New Measure of Mnemonic Discrimination Applicable to Recognition Memory Tests with Continuous Variation in Novel Stimulus Interference @@ -36,8 +37,9 @@ date: '2023-11-14' link: https://doi.org/gs5d85 orcid: 0000-0001-5290-8342 - plugin: orcid.py - file: orcid.yaml + plugin: sources.py + file: sources.yaml + image: images/thumbnails/Leger_2023.png - id: doi:10.1145/3544548.3580943 title: Are You Human? Investigating the Perceptions and Evaluations of Virtual Versus Human Instagram Influencers @@ -52,7 +54,7 @@ orcid: 0000-0001-5290-8342 plugin: sources.py file: sources.yaml - image: images/are_you_human.png + image: images/thumbnails/are_you_human.png - id: doi:10.1016/j.mex.2023.102019 title: 'A two for one special: EEG hyperscanning using a single-person EEG recording setup' @@ -66,7 +68,7 @@ orcid: 0000-0001-5290-8342 plugin: sources.py file: sources.yaml - image: images/two_for_one.png + image: images/thumbnails/two_for_one.png - id: doi:10.1016/j.prdoa.2022.100182 title: 'Aberrant corticospinal tract characteristics in prodromal PD: A diffusion tensor imaging study' @@ -116,8 +118,9 @@ date: '2022-12-02' link: https://doi.org/grpfwj orcid: 0000-0001-5290-8342 - plugin: orcid.py - file: orcid.yaml + plugin: sources.py + file: sources.yaml + image: https://images.tandf.co.uk/common/jackets/crclarge/978042934/9780429342356.jpg - id: doi:10.3389/fnins.2022.922960 title: Do emotions influence safe browsing? Toward an electroencephalography marker of affective responses to cybersecurity notifications diff --git a/_data/sources.yaml b/_data/sources.yaml index 6474b0c9..f0436ae6 100644 --- a/_data/sources.yaml +++ b/_data/sources.yaml @@ -20,11 +20,20 @@ title: "An Event-Related fMRI Study of Syntactic and Semantic Violations" # specify thumbnail images for citations +- id: doi:10.3389/fdpys.2024.1340383 + image: https://www.frontiersin.org/files/Articles/1340383/fdpys-02-1340383-HTML/image_m/fdpys-02-1340383-g003.jpg + +- id: doi:10.31234/osf.io/ezdbr + image: images/thumbnails/Leger_2023.png + +- id: doi:10.4324/9780429342356 + image: https://images.tandf.co.uk/common/jackets/crclarge/978042934/9780429342356.jpg + - id: doi:10.1016/j.mex.2023.102019 - image: images/two_for_one.png + image: images/thumbnails/two_for_one.png - id: doi:10.1145/3544548.3580943 - image: images/are_you_human.png + image: images/thumbnails/are_you_human.png - id: isbn:1446296504 image: images/rmcn_cover.png diff --git a/contact/index.md b/contact/index.md index 22a38015..8301fd48 100644 --- a/contact/index.md +++ b/contact/index.md @@ -8,13 +8,17 @@ nav: # {% include icon.html icon="fa-solid fa-envelope" %}Contact {:.center} -Our lab is part of the [Department of Psychology & Neuroscience](https://www.dal.ca/faculty/science/psychology_neuroscience.html), at [Dalhousie University](https://www.dal.ca/). 
+Our lab is located on the 2nd floor of the [Life Sciences Centre](https://www.dal.ca/campus-maps/building-directory/studley-campus/life-sciences-centre.html), in the [Department of Psychology & Neuroscience](https://www.dal.ca/faculty/science/psychology_neuroscience.html), at [Dalhousie University](https://www.dal.ca/).
 {:.center}
 
-We are located on the 2nd floor of the [Life Sciences Centre](https://www.dal.ca/campus-maps/building-directory/studley-campus/life-sciences-centre.html).
+{%
+  include figure.html
+  image="images/LSC.jpeg"
+  link="https://www.dal.ca/campus-maps/building-directory/studley-campus/life-sciences-centre.html"
+  width="600px"
+%}
 {:.center}
 
-
 {%
 include button.html
 type="email"
@@ -39,7 +43,13 @@ link="https://www.google.com/maps/place/Department+of+Psychology+and+Neuroscienc
 
 # {% include icon.html icon="fa-solid fa-envelopes-bulk" %}Mailing Address
 {:.center}
 
-1355 Oxford St.
+6287 Alumni Crescent
 Halifax, NS
 B3H 4R2
 Canada
 {:.center}
+
+
+
+{:.center}
+
+
diff --git a/images/LSC.jpeg b/images/LSC.jpeg
new file mode 100644
index 00000000..eded7498
Binary files /dev/null and b/images/LSC.jpeg differ
diff --git a/images/NCIL_EEG1.jpg b/images/NCIL_EEG1.jpg
new file mode 100644
index 00000000..5c836399
Binary files /dev/null and b/images/NCIL_EEG1.jpg differ
diff --git a/images/NCIL_EEG2.jpg b/images/NCIL_EEG2.jpg
new file mode 100644
index 00000000..9136a063
Binary files /dev/null and b/images/NCIL_EEG2.jpg differ
diff --git a/images/NCIL_EEG3.jpg b/images/NCIL_EEG3.jpg
new file mode 100644
index 00000000..4233ab47
Binary files /dev/null and b/images/NCIL_EEG3.jpg differ
diff --git a/images/are_you_human.png b/images/are_you_human.png
deleted file mode 100644
index 294915f5..00000000
Binary files a/images/are_you_human.png and /dev/null differ
diff --git a/images/bci_pirates.png b/images/bci_pirates.png
new file mode 100644
index 00000000..604edfe8
Binary files /dev/null and b/images/bci_pirates.png differ
diff --git a/images/thumbnails/Leger_2023.png b/images/thumbnails/Leger_2023.png
new file mode 100644
index 00000000..ca371d28
Binary files /dev/null and b/images/thumbnails/Leger_2023.png differ
diff --git a/images/thumbnails/are_you_human.png b/images/thumbnails/are_you_human.png
new file mode 100644
index 00000000..6a28b586
Binary files /dev/null and b/images/thumbnails/are_you_human.png differ
diff --git a/images/thumbnails/two_for_one.png b/images/thumbnails/two_for_one.png
new file mode 100644
index 00000000..0bf4be03
Binary files /dev/null and b/images/thumbnails/two_for_one.png differ
diff --git a/images/two_for_one.png b/images/two_for_one.png
deleted file mode 100644
index 06e7363a..00000000
Binary files a/images/two_for_one.png and /dev/null differ
diff --git a/participate/BCI-Participate.md b/participate/BCI-Participate.md
new file mode 100644
index 00000000..33cef387
--- /dev/null
+++ b/participate/BCI-Participate.md
@@ -0,0 +1,21 @@
+---
+title: Brain-Computer Interfaces (BCI)
+---
+
+# Brain-Computer Interfaces
+{:.center}
+
+We are recruiting adults aged 18 years or older. Please continue reading for more information!
+
+# What is the Purpose of the Project?
+Brain-computer interface (BCI) systems use brain signals to control a computer. Our lab is focused on *visual selection* BCIs, in which users direct their attention to different stimuli on a computer screen, with each stimulus corresponding to a different command. Using EEG, we measure brain activity to determine which stimulus the user is attending to.
Read more about the [brain-computer interface project here](https://www.ncilab.ca/projects/BCI).
+
+# Who Are We Looking For?
+We are currently recruiting adults aged 18 years or older, with normal or corrected-to-normal vision, no colour-blindness, and no history of neurological disorders (e.g., seizures, concussion, stroke).
+
+# What Does a Participant Do?
+Participants will take part in this study in the NCIL lab, located in the Life Sciences Centre on Dalhousie University's Studley campus. The study takes place in a single session lasting approximately 1-2 hours. The experiment will involve the use of a 32-electrode EEG system. Water-soluble electrolyte gel is used to bridge the gap between the scalp and the electrodes. Once the EEG cap and electrodes have been fitted, you will be seated in a room with a monitor that will present multiple stimulus paradigms, some of which may involve flickering.
+
+_Interested? Want to learn more?_
+Contact us by email (BCIncil@dal.ca) or by phone (902-494-1911).
+We look forward to hearing from you!
diff --git a/participate/index.md b/participate/index.md
index 64e13bea..8b1afe67 100644
--- a/participate/index.md
+++ b/participate/index.md
@@ -22,6 +22,17 @@ Interested in participating in one of our studies? Click the links below to find
 tags=""
 repo=""
 %}
+{%
+  include card.html
+  image="images/projects/EEG_setup.jpeg"
+  link="participate/BCI-Participate"
+  title="Brain-Computer Interfaces"
+  subtitle=""
+  description=""
+  tooltip=""
+  tags=""
+  repo=""
+%}
 {%
   include card.html
   image="images/projects/aphasia_couple.jpeg"
diff --git a/projects/BCI.md b/projects/BCI.md
index 578b6384..86a748c3 100644
--- a/projects/BCI.md
+++ b/projects/BCI.md
@@ -1,12 +1,26 @@
 ---
 title: Brain-Computer Interfaces
 ---
-# Brain-Computer Interfaces
+# Brain-Computer Interfaces for Visual Selection
 
 ## Project Rationale
-A brain-computer interface (BCI) is an electronic system that converts users’ brain activity into control commands to operate external software or devices (e.g., typing programs, electronic wheelchairs). BCIs receive input directly from task-related brain signals rather than from traditional input methods that require muscular control to operate (e.g., mouse/keyboard, joystick). Most BCIs use visual stimuli to elicit the necessary input signal(s). An array of objects is presented and the user attends to the one that he/she wants to select—the *target*. The objects are highlighted systematically, using stimulation techniques known to elicit predictable signals of interest when the target is highlighted, but not (as strongly) when non-targets are highlighted. Neuroimaging equipment records the user’s brain activity, and machine learning algorithms are trained to classify target and non-target responses based on the presence or absence of the predicted signals. The user’s target is determined to be the object that elicited signal-present responses. The system selects the target object and executes its corresponding control command. In this way, BCIs offer promising assistive technology solutions for people suffering with severe motor dysfunction. Beyond clinical use, applications are being developed for gaming systems, smart home control, and other uses.
-In theory, target stimuli will consistently elicit distinct response signals from all users, all the time. In practice, however, this has yet to be the case. No two brains are identical, and no one’s brain activity remains constant under varying circumstances (e.g., fatigue, hunger, mood).
As such, signal features (e.g., strength, timing) in response to a given stimulus will vary widely—both between users and within an individual user at different points in time. This variability compromises classification accuracy and, in turn, overall system performance. Consequently, there is no BCI that works extremely well for all users, nor for a single user all the time.
+A brain-computer interface (BCI) is an electronic system that converts users’ brain activity into control commands to operate external software or devices (e.g., typing programs, electronic wheelchairs). BCIs offer promising assistive technology solutions for people living with severe motor dysfunction. Beyond clinical use, applications are being developed for gaming systems, smart home control, and other uses.
+
+One class of BCIs uses visual stimuli to elicit the necessary input signal(s). A number of pictures (or letters, numbers, etc.) are presented on a screen, and the user attends to the one they want to select. The system then identifies the user’s target based on the brain activity elicited by the target stimulus. An important part of this process is modifying the visual stimuli so that we can determine which one the user is paying attention to. Our project is focused on testing different kinds of visual stimuli, and ways of presenting them, to determine which ones are most effective at eliciting the necessary brain activity.
+
+One of the biggest challenges with BCIs is that no two brains, or brain responses, are identical. So, the best method of stimulation may vary from person to person, and even within the same person at different times. Brain signals are affected by things like tiredness, boredom, and hunger. Consequently, there is no BCI that works extremely well for all users, nor for a single user all the time. Our goal is to optimize the performance of BCIs by maximizing the distinction between the brain activity elicited by the target and non-target stimuli.
+
+## Project Description
+
+We are testing different ways of presenting stimuli, and analyzing the brain responses, to determine which approaches work best, or how they can be combined to improve performance. One way of presenting the stimuli is to use the *oddball* paradigm, where each stimulus (picture) is highlighted in some way, one at a time, in a random order. The highlighting can involve making that stimulus bigger, brighter, a different colour, or a different object (such as a face). When the highlight occurs on the attended stimulus, the brain response is different from (or at least stronger than) the response when the highlight occurs on a non-attended stimulus. Another approach is for the different stimuli to flicker at different rates. Flickering stimuli elicit *steady-state visual evoked potentials* (SSVEPs), which are brain responses that oscillate (get stronger and weaker) at the same rate as the flicker frequency. The SSVEP response to the attended stimulus is stronger than the response to the non-attended stimuli. We are testing different ways of combining these two approaches to see if we can get a stronger response than either one alone. For fun (and because we're in Halifax, a coastal city), we are using pirate-themed stimuli!
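+
+To make the SSVEP approach concrete, here is a minimal sketch of frequency-tagging-style decoding. This is not our actual analysis pipeline; the `ssvep_scores` helper, flicker frequencies, sampling rate, and simulated data below are made up for illustration.
+
+```python
+import numpy as np
+
+def ssvep_scores(eeg, srate, flicker_freqs):
+    """Spectral power near each candidate flicker frequency."""
+    power = np.abs(np.fft.rfft(eeg)) ** 2
+    freqs = np.fft.rfftfreq(eeg.size, d=1.0 / srate)
+    # Sum power in a narrow band (+/- 0.5 Hz) around each flicker frequency
+    return [power[np.abs(freqs - f) < 0.5].sum() for f in flicker_freqs]
+
+# Hypothetical setup: three stimuli flickering at 7, 11, and 13 Hz
+srate = 250                          # EEG sampling rate (Hz)
+t = np.arange(0, 4.0, 1.0 / srate)   # 4 s of data
+flicker_freqs = [7.0, 11.0, 13.0]
+
+# Simulate attending the 11 Hz stimulus: its SSVEP rides on top of noise
+eeg = np.sin(2 * np.pi * 11.0 * t) + np.random.randn(t.size)
+
+scores = ssvep_scores(eeg, srate, flicker_freqs)
+print("Attended stimulus index:", int(np.argmax(scores)))  # -> 1 (the 11 Hz stimulus)
+```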
+
+{%
+  include figure.html
+  image="images/bci_pirates.png"
+  caption="An example BCI stimulus, using green pirate faces as highlights"
+  width="400px"
+%}
 
 ## Mission
 
Phase 2: We will then combine two stimulus presentation methods to elicit a hybr
 Phase 3: Once the performance metrics have been maximized, we aim to integrate this stimulus presentation approach into an *online* BCI system designed to identify users' target objects and execute their corresponding commands in real time.
 
 ## Current State of Project and Lab Volunteer Contribution Opportunities
-We are finalizing the stimulus protocol for Phase 1, and we will resume collecting data during the Fall 2023 semester. We will conduct several statistical and machine learning analyses on this data during the Winter 2024 semester. In parallel, across both semesters, we will integrate our second stimulation method and begin Phase 2 data collection. Phase 2 analyses and Phase 3 preparation will be conducted thereafter.
-
+We have finalized the stimulus protocol for Phase 2, and will be piloting the study over summer 2024. We will conduct several statistical and machine learning analyses on this data during the Fall 2024 semester. In parallel, we are beginning Phase 3 with the gamification and online implementation of a pirate-based BCI paradigm.
 
-Overall, this is a complex study that intersects neuroscience, computer science, and data science. Involvement opportunities exist at all levels (volunteer, independent study, honours, graduate students) during any phase. Note that while domain knowledge is considered an asset, especially [NESC/PSYO 3505 *Neural Data Science*](https://dalpsychneuro.github.io/NESC_3505/), it may not be required for some roles/tasks.
+Overall, this is a complex study that intersects neuroscience, computer science, and data science. Involvement opportunities exist at all levels (independent study, honours, graduate students) during any phase. Note that while domain knowledge is considered an asset (especially [NESC/PSYO 3505 *Neural Data Science*](https://dalpsychneuro.github.io/NESC_3505/)), it may not be required for some roles/tasks.
 
 ## Funding Sources
-Funded by the Nova Scotia Graduate Scholarship (NSGS) and the Dalhousie Medical Research Foundation (DMRF).
+Funded by the Natural Sciences and Engineering Research Council of Canada (NSERC).
 
 {%
 include figure.html
-  image="images/logos/DMRF_Logo.svg"
-  link="https://dmrf.ca/"
-  tooltip="DMRF"
+  image="images/logos/nserc_logo.png"
+  link="https://www.nserc-crsng.gc.ca/"
+  tooltip="NSERC"
+  height="100px"
 %}
diff --git a/publications/index.md b/publications/index.md
index 25bde972..dfb75728 100644
--- a/publications/index.md
+++ b/publications/index.md
@@ -8,7 +8,7 @@ nav:
 # {% include icon.html icon="fa-solid fa-book" %}NCIL Publications
 {:.center}
 
-Below are publications authored by Dr. Aaron Newman and NCIL lab members. Please note that these citations are automatically generated, and sometimes there may be repetitions (e.g., with preprints). We will be adding PDFs of those papers we can share over time, but for now please refer to the links to the original sources. If you wish to request a paper directly, you may do so via [ResearchGate](https://www.researchgate.net/profile/Aaron-Newman-2).
+Below are publications authored by Dr. Aaron Newman and NCIL lab members. Please note that these citations are automatically generated, and sometimes there may be repetitions (e.g., with preprints).
If you wish to request a paper directly, you may do so via [ResearchGate](https://www.researchgate.net/profile/Aaron-Newman-2). {% include section.html %} @@ -16,6 +16,8 @@ Below are publications authored by Dr. Aaron Newman and NCIL lab members. Please {% include citation.html lookup="Research Methods for Cognitive Neuroscience" style="rich" %} +{% include citation.html lookup="doi:10.3389/fdpys.2024.1340383" style="rich" %} + {% include citation.html lookup="A two for one special: EEG hyperscanning using a single-person EEG recording setup" style="rich" %} {% include citation.html lookup="Are You Human? Investigating the Perceptions and Evaluations of Virtual Versus Human Instagram Influencers" style="rich" %}
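
A note on the data changes above: switching `plugin: orcid.py` to `plugin: sources.py` is what lets entries in `_data/sources.yaml` attach thumbnail images to the auto-generated citations, matched by `id`. The sketch below illustrates that merge-by-id idea only; it is not the site's actual plugin code, and it assumes PyYAML is available.

```python
import yaml  # assumes PyYAML (pip install pyyaml)

def merge_sources(citations_path="_data/citations.yaml",
                  sources_path="_data/sources.yaml"):
    """Overlay sources.yaml fields (e.g., image) onto citations, matched by id."""
    with open(citations_path) as f:
        citations = yaml.safe_load(f) or []
    with open(sources_path) as f:
        sources = yaml.safe_load(f) or []
    overrides = {s["id"]: s for s in sources if "id" in s}
    for cite in citations:
        extra = overrides.get(cite.get("id"), {})
        # Copy override fields onto the citation, leaving the id untouched
        cite.update({k: v for k, v in extra.items() if k != "id"})
    return citations

if __name__ == "__main__":
    for cite in merge_sources():
        print(cite.get("id"), "->", cite.get("image", "(no thumbnail)"))
```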