diff --git a/.github/workflows/staging_fusion-staging.yml b/.github/workflows/staging_fusion-staging.yml
index ef800c73..d4f0d358 100644
--- a/.github/workflows/staging_fusion-staging.yml
+++ b/.github/workflows/staging_fusion-staging.yml
@@ -31,6 +31,8 @@ jobs:
           echo "NEXT_PUBLIC_FUSION_RELAY_URL=${{ secrets.NEXT_PUBLIC_FUSION_RELAY_URL }}" >> .env.production
           echo "NEXT_PUBLIC_FUSION_NOSTR_PUBLIC_KEY=${{ secrets.NEXT_PUBLIC_FUSION_NOSTR_PUBLIC_KEY }}" >> .env.production
           echo "NEXT_PUBLIC_NEUROFUSION_BACKEND_URL=${{ secrets.NEXT_PUBLIC_NEUROFUSION_BACKEND_URL }}" >> .env.production
+          echo "NEXT_PUBLIC_ANALYSIS_SERVER_URL=${{ secrets.NEXT_PUBLIC_ANALYSIS_SERVER_URL }}" >> .env.production
+          echo "NEXT_PUBLIC_APP_INSIGHTS_KEY=${{ secrets.NEXT_PUBLIC_APP_INSIGHTS_KEY }}" >> .env.production

           npm run build --if-present
           npm run test --if-present
diff --git a/README.md b/README.md
index 343d6580..d4c3725c 100644
--- a/README.md
+++ b/README.md
@@ -15,22 +15,21 @@ We build tools to accelerate the adoption of neurotechnology and behavior research
   - [x] Resting state (Eyes Closed/Eyes Open)
   - [x] Stroop Task
   - [x] Auditory Oddball
+  - [x] Visual Oddball
   - [x] Flappy Bird game (Detecting intent & frustration)

 - [ ] Analysis of collected data https://usefusion.ai/analysis
   - [x] Chart of steady state frequency power across recordings
-  - [ ] Periodic vs. aperiodic frequency evaluation using [fooof package](https://fooof-tools.github.io/fooof/)
+  - [x] Periodic vs. aperiodic frequency evaluation using [fooof package](https://fooof-tools.github.io/fooof/)
   - [ ] If applicable event related potential analysis

 - [ ] Running Distributed Studies with people (Quests) https://usefusion.ai/blog/quests
   - [x] A set of prompts people respond to at intervals on a topic related to you
   - [x] Connecting Apple Health - steps, sleep, heart-rate
-  - [ ] Support for cognitive experiments
-  - [ ] Quest Dashboard - view submissions, analyze and publish results
-
-- [ ] Design and upload custom EEG experiment protocols to participants' devices
+  - [x] Quest Dashboard - view submissions, analyze and publish results
+  - [ ] Design and upload custom EEG experiment protocols to participants' devices

 - [ ] Connecting Other Sources
   - [ ] Connect your screentime events & productivity metrics using [ActivityWatch](https://activitywatch.net)
diff --git a/analysis_api/.gitignore b/analysis_api/.gitignore
index cb83b2bf..0b69782a 100644
--- a/analysis_api/.gitignore
+++ b/analysis_api/.gitignore
@@ -1,4 +1,5 @@
 temp_unzip
 powerComparisons.png
 powerDistributions.png
-.env
\ No newline at end of file
+.env
+fooof_outputs
\ No newline at end of file
diff --git a/analysis_api/app.py b/analysis_api/app.py
index 1c8e6e44..a704c7d8 100644
--- a/analysis_api/app.py
+++ b/analysis_api/app.py
@@ -95,40 +95,117 @@ def process_eeg():
         return jsonify({'error': str(e)}), 500

 # TODO: endpoint for ERP analysis
+@app.route('/api/v1/process_eeg_erp', methods=['POST'])
+def process_eeg_erp():
+    try:
+        # Placeholder response until the ERP analysis pipeline is implemented
+        return jsonify({'response': "works perfect"}), 200
+    except Exception as e:
+        return jsonify({'error': 'error processing', 'message': str(e)}), 500

 @app.route('/api/v1/process_eeg_fooof', methods=['POST'])
 def process_eeg_fooof():
+    """When a person uploads an EEG file, we need to process it and return
+    the FOOOF results as images."""
     try:
-        # Check if the POST request contains a file with the key 'file'
-        if 'file' not in request.files:
-            return jsonify({'error': 'No file part'}), 400
+        # Use a non-interactive backend so figures can be rendered server-side
+        import matplotlib
+        matplotlib.use('Agg')
+        import matplotlib.pyplot as plt
+        from fooof import FOOOFGroup
+        import mne
+        import io
+        import base64
+        import time

-        if 'fileTimestamp' not in request.form:
-            return jsonify({'error': 'No file timestamp'}), 400
+        # Check if the POST request contains a file with the key 'eegFile'
+        if 'eegFile' not in request.files:
+            return jsonify({'error': 'No EEG file submitted for processing'}), 400

-        file = request.files['file']
+        eegFile = request.files['eegFile']
+        samplingFrequency = int(request.form['samplingFrequency'])

-        fileTimestamp = request.form['fileTimestamp']
+        print("eegFile", eegFile)

         # Check if the file has a filename
-        if file.filename == '':
-            return jsonify({'error': 'No selected file'}), 400
+        if eegFile.filename == '':
+            return jsonify({'error': 'No selected EEG file'}), 400
+
+        if eegFile.filename.endswith('.csv'):
+            # Read the CSV file into a pandas DataFrame
+            df = pd.read_csv(eegFile)
+            df.drop(columns=['index'], inplace=True)
+            sfreq = samplingFrequency
+            info = mne.create_info(ch_names=list(df.columns[1:]), sfreq=sfreq, ch_types='eeg')
+
+            # Drop the timestamp column and transpose to channels x samples
+            df = df.values[:, 1:].T
+            df *= 1e-6  # convert from uV to V
+            raw = mne.io.RawArray(df, info)
+            raw.set_montage('standard_1020')

-        # Check if the file is a ZIP file
-        if file.filename.endswith('.zip'):
-            # Create a temporary directory to store the unzipped files
-            temp_dir = 'temp_unzip'  # + "_" + str(int(time.time()))
-            os.makedirs(temp_dir, exist_ok=True)
+            # Segment the continuous recording into fixed-length epochs
+            events = mne.make_fixed_length_events(raw, duration=5)
+            epochs = mne.Epochs(raw, events, tmin=0, tmax=0.5, baseline=None)

-            # Save the ZIP file to the temporary directory
-            zip_file_path = os.path.join(temp_dir, file.filename)
-            file.save(zip_file_path)
+            epochsSpectrum = epochs.compute_psd(fmin=1, fmax=40, method='welch', verbose=False)
+
+            fg = FOOOFGroup(peak_width_limits=[1.0, 8.0], min_peak_height=0.1, peak_threshold=2.)
+            fg.fit(epochsSpectrum.freqs, epochsSpectrum.average().get_data(), freq_range=[1, 40])

-            # Unzip the file
-            with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
-                zip_ref.extractall(temp_dir + "/raw_files")
+            # Save the group report to disk, then embed it in the response
+            temp_dir = 'fooof_outputs'
+            os.makedirs(temp_dir, exist_ok=True)
+            file_name = f"fooof_results_{int(time.time())}.png"
+            fg.save_report(file_name, file_path=temp_dir)
+
+            images = [{
+                "key": "FOOOF - Group Results",
+                "value": "data:image/png;base64," + encode_image_to_base64(temp_dir + "/" + file_name)
+            }]
+
+            # Get the FOOOF results for each channel
+            ch_names = raw.info['ch_names']
+            for i in range(len(ch_names)):
+                result = fg.get_fooof(i)
+
+                # Build a text summary of the model fit for this channel
+                results = result.get_results()
+                results_str = (
+                    f" FOOOF - POWER SPECTRUM MODEL\n\n"
+                    f"The model was run on the frequency range {result.freq_range[0]:.2f} - {result.freq_range[1]:.2f} Hz\n"
+                    f"Frequency Resolution is {result.freq_res:.2f} Hz\n\n"
+                    f"Aperiodic Parameters (offset, exponent):\n"
+                    f"{results.aperiodic_params[0]:.4f}, {results.aperiodic_params[1]:.4f}\n\n"
+                    f"{len(results.peak_params)} peaks were found:\n"
+                )
+                for peak in results.peak_params:
+                    results_str += f"CF: {peak[0]:6.2f}, PW: {peak[1]:6.3f}, BW: {peak[2]:6.2f}\n"
+                results_str += (
+                    f"\nGoodness of fit metrics:\n"
+                    f"R^2 of model fit is {results.r_squared:.4f}\n"
+                    f"Error of the fit is {results.error:.4f}"
+                )
+                fig, ax = plt.subplots()
+
+                # Plot the per-channel model fit
+                result.plot(ax=ax, show=False)
+
+                # Save the plot to a BytesIO object
+                img_buffer = io.BytesIO()
+                fig.savefig(img_buffer, format='png')
+                img_buffer.seek(0)
+                plt.close(fig)
+
+                # Encode the image to base64
+                img_str = base64.b64encode(img_buffer.getvalue()).decode()
+
+                # Add the image to the list
+                images.append({
+                    "key": f"FOOOF Results for {ch_names[i]}",
+                    "value": f"data:image/png;base64,{img_str}",
+                    "summary": results_str
+                })

-        print("file directory", temp_dir)
+            return jsonify({"images": images, "summary": "FOOOF Results"}), 200
+
     except Exception as e:
         print("error", e)
         return jsonify({'error': str(e)}), 500
diff --git a/analysis_api/requirements.txt b/analysis_api/requirements.txt
index 180902cd..698d1e16 100644
--- a/analysis_api/requirements.txt
+++ b/analysis_api/requirements.txt
@@ -8,4 +8,5 @@ scipy
 fooof
 nltk
 python-dotenv
-requests
\ No newline at end of file
+requests
+mne
\ No newline at end of file
diff --git a/frontend/next.config.js b/frontend/next.config.js
index ef6ad63d..17c1b431 100644
--- a/frontend/next.config.js
+++ b/frontend/next.config.js
@@ -13,5 +13,7 @@ module.exports = {
     NEXT_PUBLIC_FUSION_RELAY_URL: process.env.NEXT_PUBLIC_FUSION_RELAY_URL,
     NEXT_PUBLIC_FUSION_NOSTR_PUBLIC_KEY: process.env.NEXT_PUBLIC_FUSION_NOSTR_PUBLIC_KEY,
     NEXT_PUBLIC_NEUROFUSION_BACKEND_URL: process.env.NEXT_PUBLIC_NEUROFUSION_BACKEND_URL,
+    NEXT_PUBLIC_ANALYSIS_SERVER_URL: process.env.NEXT_PUBLIC_ANALYSIS_SERVER_URL,
+    NEXT_PUBLIC_APP_INSIGHTS_KEY: process.env.NEXT_PUBLIC_APP_INSIGHTS_KEY,
   },
 };
diff --git a/frontend/src/pages/analysis.tsx b/frontend/src/pages/analysis.tsx
index 659dc490..7d007702 100644
--- a/frontend/src/pages/analysis.tsx
+++ b/frontend/src/pages/analysis.tsx
@@ -1,6 +1,6 @@
 import { GetServerSideProps, NextPage } from "next";
 import { getServerSession } from "next-auth";
-import React, { useState } from "react";
+import React, { useEffect, useState } from "react";

 import { authOptions } from "./api/auth/[...nextauth]";
 import { DashboardLayout, Meta } from "~/components/layouts";
@@ -10,42 +10,57 @@
 import { set } from "zod";

 interface ResponseImage {
   key: string;
   value: string;
+  summary: string;
 }

 const AnalysisPage: NextPage = () => {
   const [file, setFile] = useState<File | null>(null);
   const [images, setImages] = useState<ResponseImage[]>([]);
+  const [selectedImage, setSelectedImage] = useState<ResponseImage | null>(null);
   const [loading, setLoading] = useState(false);
+  const [eegFile, setEegFile] = useState<File | null>(null);
+  const [stimulusFile, setStimulusFile] = useState<File | null>(null);

-  const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
-    const files = e.target.files;
-    if (files && files.length > 0) {
-      setFile(files[0]);
+  const [samplingFrequency, setSamplingFrequency] = useState(256);
+
+  useEffect(() => {
+    if (images.length > 0) {
+      setSelectedImage(images[0]);
     }
-  };
+  }, [images]);
+
   const handleUpload = async () => {
-    // TODO: support multiple file uploads for comparison
-    if (file) {
+    if (eegFile) {
       const formData = new FormData();
-      formData.append("file", file);
+      if (eegFile) {
+        formData.append("eegFile", eegFile);
+      }
+      if (stimulusFile) {
+        formData.append("stimulusFile", stimulusFile);
+      }

       // we need to extract from the file url the exact timestamp
-      const fileMatch = (file as File).name.match(/(\d+)/);
+      const fileMatch = (eegFile as File).name.match(/(\d+)/);
       if (!fileMatch || !(fileMatch?.length > 0)) {
         alert(
-          "File name does not contain a timestamp. Make sure you're uploading a dataset recorded on the playground."
+          "EEG file name does not contain a timestamp. Make sure you're uploading a dataset recorded on the playground."
         );
         return;
       }

       const fileTimestamp = fileMatch[0];
       formData.append("fileTimestamp", fileTimestamp);
-      console.log(file);
+
+      // add the sampling frequency to the form data
+      if (samplingFrequency) {
+        console.log("samplingFrequency", samplingFrequency);
+        formData.append("samplingFrequency", samplingFrequency.toString());
+      }

       try {
         setLoading(true);
-        const response = await fetch(`${process.env["NEXT_PUBLIC_ANALYSIS_SERVER_URL"]}/api/v1/process_eeg`, {
+        const response = await fetch(`${process.env["NEXT_PUBLIC_ANALYSIS_SERVER_URL"]}/api/v1/process_eeg_fooof`, {
           method: "POST",
           body: formData,
         });
@@ -74,33 +89,140 @@ const AnalysisPage: NextPage = () => {
         <h1>Analysis</h1>
         <p>
-          Upload a zip file from a previous recording completed on the playground
+          Upload a .csv file of EEG data and get a report on brain activity in that time period.
         </p>

-        <div>
-          <label>Choose file:</label>{" "}
-          <input type="file" onChange={handleFileChange} />
-        </div>
+        <div>
+          <input type="file" accept=".csv" onChange={(e) => setEegFile(e.target.files?.[0] ?? null)} />
+          {eegFile && (
+            <p>Selected EEG file: {eegFile.name}</p>
+          )}
+        </div>
+
+        {eegFile && (
+          <div>
+            <input
+              type="number"
+              value={samplingFrequency}
+              onChange={(e) => setSamplingFrequency(Number(e.target.value))}
+            />
+          </div>
+        )}
+
+        <div>
+          <input type="file" onChange={(e) => setStimulusFile(e.target.files?.[0] ?? null)} />
+          {stimulusFile && (
+            <p>
+              Selected stimulus file: {stimulusFile.name}
+            </p>
+          )}
+        </div>

         <button onClick={handleUpload}>Upload</button>

         {loading && (
           <p>Processing...</p>
         )}
-        {images.length > 0 &&
-          images.map((image) => (
-            <div key={image.key}>
-              <h2>{image.key}</h2>
-              <img src={image.value} alt={image.key} />
-            </div>
-          ))}
+        {images.length > 0 && (
+          <div>
+            <select
+              value={selectedImage?.key ?? ""}
+              onChange={(e) => setSelectedImage(images.find((image) => image.key === e.target.value) ?? null)}
+            >
+              {images.map((image) => (
+                <option key={image.key} value={image.key} label={image.key} />
+              ))}
+            </select>
+
+            {selectedImage && (
+              <div>
+                <h2>{selectedImage.key}</h2>
+                <div
+                  dangerouslySetInnerHTML={{ __html: selectedImage.summary?.replace(/\n/g, "<br />") }}
+                ></div>
+                <img src={selectedImage.value} alt={selectedImage.key} />
+              </div>
+            )}
+          </div>
+        )}
       </DashboardLayout>
     </>
   );
 };
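
For a quick end-to-end check of the new endpoint without going through the Next.js frontend, a minimal Python client sketch follows. The eegFile/samplingFrequency form fields and the images/summary response shape come from analysis_api/app.py in this diff; the host, port, and CSV filename are assumptions, not values taken from the PR.

    # Minimal sketch: exercise the new FOOOF endpoint directly.
    # Assumed (not from the diff): the Flask app listens on localhost:5000,
    # and recording_1700000000.csv is a playground CSV export on disk.
    import requests

    with open("recording_1700000000.csv", "rb") as f:
        resp = requests.post(
            "http://localhost:5000/api/v1/process_eeg_fooof",
            files={"eegFile": f},
            data={"samplingFrequency": "256"},
        )
    resp.raise_for_status()

    payload = resp.json()
    print(payload["summary"])
    for image in payload["images"]:
        # Each entry pairs a label with a base64 PNG data URI; per-channel
        # entries also carry a text "summary" of the model fit.
        print(image["key"])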