diff --git a/src/spyglass/common/__init__.py b/src/spyglass/common/__init__.py index 77e12d18e..4f5eb627e 100644 --- a/src/spyglass/common/__init__.py +++ b/src/spyglass/common/__init__.py @@ -52,7 +52,7 @@ PositionVideo, TrackGraph, ) -from spyglass.common.common_region import BrainRegion +from spyglass.common.common_region import BrainCoordinateSystem, BrainRegion from spyglass.common.common_sensors import SensorData from spyglass.common.common_session import Session from spyglass.common.common_subject import Subject diff --git a/src/spyglass/common/common_region.py b/src/spyglass/common/common_region.py index 9cd3f031e..17668bfce 100644 --- a/src/spyglass/common/common_region.py +++ b/src/spyglass/common/common_region.py @@ -1,6 +1,6 @@ import datajoint as dj -from spyglass.utils.dj_mixin import SpyglassMixin +from spyglass.utils.dj_mixin import SpyglassMixin, logger schema = dj.schema("common_region") @@ -10,9 +10,13 @@ class BrainRegion(SpyglassMixin, dj.Lookup): definition = """ region_id: smallint auto_increment --- - region_name: varchar(200) # the name of the brain region - subregion_name=NULL: varchar(200) # subregion name - subsubregion_name=NULL: varchar(200) # subregion within subregion + region_name: varchar(200) # Name of the region (e.g., 'Hippocampal formation') + region_abbr=NULL: varchar(64) # Standard abbreviation (e.g., 'HPF') + subregion_name=NULL: varchar(200) # Subregion name (e.g., 'Cornu Ammonis 1') + subregion_abbr=NULL: varchar(64) # Subregion abbreviation (e.g., 'CA1') + subsubregion_name=NULL: varchar(200) # Sub-subregion name (e.g., 'stratum pyramidale') + subsubregion_abbr=NULL: varchar(64) # Sub-subregion abbreviation (e.g., 'sp') + atlas_source=NULL: varchar(128) # Source atlas (e.g., 'Allen CCF v3', 'Paxinos Rat 6th Ed') """ @classmethod @@ -48,6 +52,49 @@ def fetch_add( ) query = BrainRegion & key if not query: + logger.info( + f"Brain region '{region_name}' not found. Adding to BrainRegion. 
" + "Please make sure to check the spelling and format." + "Remove any extra spaces or special characters." + ) cls.insert1(key) query = BrainRegion & key return query.fetch1("region_id") + + +@schema +class BrainCoordinateSystem(dj.Lookup): + definition = """ + # Defines standard coordinate systems used for brain spatial data. + coordinate_system_id: varchar(64) # Primary key (e.g., 'Allen_CCFv3_RAS_um') + --- + description: varchar(255) # Description of the coordinate system + atlas_source=NULL: varchar(128) # Source if based on an atlas (e.g., 'Allen CCF v3', 'WHS Rat v4') + """ + contents = [ + [ + "Allen_CCFv3_RAS_um", + "Allen CCF v3 Mouse Atlas, RAS orientation, micrometers", + "Allen CCF v3", + ], + [ + "Paxinos_Rat_6th_PIR_um", + "Paxinos & Watson Rat Atlas 6th Ed, PIR orientation, micrometers", + "Paxinos Rat 6th Ed", + ], + [ + "WHS_Rat_v4_RAS_um", + "Waxholm Space Sprague Dawley Rat Atlas v4, RAS orientation, micrometers", + "WHS Rat v4", + ], + [ + "Histology_Image_Pixels", + "2D Pixels from processed histology image (Origin/Orientation Varies)", + None, + ], + [ + "MicroCT_Voxel_Scan", + "3D Voxel space from raw microCT scan (Orientation relative to scanner)", + None, + ], + ] diff --git a/src/spyglass/electrode_localization/__init__.py b/src/spyglass/electrode_localization/__init__.py new file mode 100644 index 000000000..42e2dedce --- /dev/null +++ b/src/spyglass/electrode_localization/__init__.py @@ -0,0 +1,5 @@ +from spyglass.electrode_localization.localization_merge import ( + ChannelBrainLocation, + ChannelBrainLocationHistologyV1, + ChannelBrainLocationMicroCTV1, +) diff --git a/src/spyglass/electrode_localization/localization_merge.py b/src/spyglass/electrode_localization/localization_merge.py new file mode 100644 index 000000000..e6fe39fce --- /dev/null +++ b/src/spyglass/electrode_localization/localization_merge.py @@ -0,0 +1,36 @@ +"""Merge of electrode channel locations from histology and microCT pipelines.""" + +import datajoint as dj + 
+from spyglass.electrode_localization.v1.histology import ( # noqa: F401 + ChannelBrainLocationHistologyV1, +) +from spyglass.electrode_localization.v1.micro_ct import ( # noqa: F401 + ChannelBrainLocationMicroCTV1, +) +from spyglass.utils import SpyglassMixin +from spyglass.utils.dj_merge_tables import _Merge + +schema = dj.schema("electrode_localization_v1") + + +@schema +class ChannelBrainLocation(_Merge, SpyglassMixin): + """Merge of electrode channel locations from histology and microCT pipelines. + + The master table lists each (subject, probe, channel), with parts: + - HistologyV1: ground‐truth coordinates from manual histology alignment. + - MicroCTV1: coregistered coordinates from the microCT volume. + """ + + class HistologyV1(SpyglassMixin, dj.Part): + definition = """ + -> master + -> ChannelBrainLocationHistologyV1 + """ + + class MicroCTV1(SpyglassMixin, dj.Part): + definition = """ + -> master + -> ChannelBrainLocationMicroCTV1 + """ diff --git a/src/spyglass/electrode_localization/v1/__init__.py b/src/spyglass/electrode_localization/v1/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/spyglass/electrode_localization/v1/histology.py b/src/spyglass/electrode_localization/v1/histology.py new file mode 100644 index 000000000..5b2d98de9 --- /dev/null +++ b/src/spyglass/electrode_localization/v1/histology.py @@ -0,0 +1,32 @@ +"""Histology-derived coordinates and region assignment for an electrode""" + +import datajoint as dj + +from spyglass.common import ( + BrainCoordinateSystem, + BrainRegion, + Electrode, +) # noqa: F401 +from spyglass.histology.v1.histology import ( # noqa: F401 + HistologyImages, + HistologyRegistration, +) +from spyglass.utils import SpyglassMixin + +schema = dj.schema("electrode_localization_v1") + + +@schema +class ChannelBrainLocationHistologyV1(SpyglassMixin, dj.Manual): + definition = """ + # Histology-derived coordinates and region assignment for an electrode + -> Electrode # Electrode being 
localized + -> HistologyImages # Source NWB file link for histology images + -> HistologyRegistration # Alignment parameters used + --- + -> BrainCoordinateSystem # Defines the space for pos_x,y,z (e.g., Allen CCF RAS um) + pos_x: float # (um) coordinate in the specified space + pos_y: float # (um) coordinate in the specified space + pos_z: float # (um) coordinate in the specified space + -> BrainRegion # Assigned brain region + """ diff --git a/src/spyglass/electrode_localization/v1/micro_ct.py b/src/spyglass/electrode_localization/v1/micro_ct.py new file mode 100644 index 000000000..e2f5af1cb --- /dev/null +++ b/src/spyglass/electrode_localization/v1/micro_ct.py @@ -0,0 +1,32 @@ +"""MicroCT-derived coordinates and region assignment for an electrode""" + +import datajoint as dj + +from spyglass.common import ( + BrainCoordinateSystem, + BrainRegion, + Electrode, +) # noqa: F401 +from spyglass.micro_ct.v1.micro_ct import ( # noqa: F401 + MicroCTImages, + MicroCTRegistration, +) +from spyglass.utils import SpyglassMixin + +schema = dj.schema("electrode_localization_v1") + + +@schema +class ChannelBrainLocationMicroCTV1(SpyglassMixin, dj.Manual): + definition = """ + # MicroCT-derived coordinates and region assignment for an electrode + -> Electrode # Electrode being localized + -> MicroCTImages # Source NWB file link for microCT data + -> MicroCTRegistration # Alignment parameters used + --- + -> BrainCoordinateSystem # Defines the space for pos_x,y,z + pos_x: float # (um) coordinate in the specified space + pos_y: float # (um) coordinate in the specified space + pos_z: float # (um) coordinate in the specified space + -> BrainRegion # Assigned brain region + """ diff --git a/src/spyglass/histology/__init__.py b/src/spyglass/histology/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/spyglass/histology/v1/__init__.py b/src/spyglass/histology/v1/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/src/spyglass/histology/v1/histology.py b/src/spyglass/histology/v1/histology.py new file mode 100644 index 000000000..393906560 --- /dev/null +++ b/src/spyglass/histology/v1/histology.py @@ -0,0 +1,141 @@ +import datajoint as dj + +from spyglass.common import ( # noqa: F401 + AnalysisNwbfile, + BrainCoordinateSystem, + LabMember, + Subject, +) +from spyglass.utils import SpyglassMixin, logger + +schema = dj.schema("histology_v1") + + +@schema +class Histology(SpyglassMixin, dj.Manual): + definition = """ + # Represents a single histology preparation for a subject + -> Subject + histology_id: varchar(32) # User-defined ID (e.g., 'probe_track_run1') + --- + prep_date=NULL: date # Optional: Date of tissue preparation + slice_orientation: enum( # Orientation of sections + "coronal", + "sagittal", + "horizontal", + "other" + ) + slice_thickness: float # (um) Thickness of sections + mounting_medium=NULL: varchar(128) # e.g., 'DPX', 'Fluoromount-G' + experiment_purpose: varchar(1024) # e.g., 'Probe track recovery', 'Anatomical ref' + notes="": varchar(2048) # Optional general notes about the preparation + -> [nullable] LabMember.proj(histology_experimenter='user_name') # Optional: who did the prep? + + # --- Data Source --- + output_format='TIFF stack': varchar(64) # Format of raw image data from scanner + raw_scan_path: varchar(512) # Path to the raw output (e.g., folder containing TIFF stack) + """ + + class HistologyStain(SpyglassMixin, dj.Part): + definition = """ + # Details of specific stains used in a histology preparation + -> Histology + stain_index: tinyint unsigned # Use index for multiple stains (0, 1, 2...) 
+ --- + identified_feature: varchar(128) # Biological target/marker (e.g., 'Nissl Bodies', 'ChR2-tdTomato+', 'ProbeTrack_DiI') + visualization_agent: varchar(128)# Method/molecule making feature visible (e.g., 'Cresyl Violet', 'Native tdTomato', 'DiI', 'Alexa 488') + stain_type: enum( # Type of staining method used + "immunohistochemistry", + "genetic_marker", + "tracer", + "anatomical", + "histochemical", + "in_situ_hybridization", + "other" + ) = "anatomical" + stain_protocol_name=NULL: varchar(128) # Optional: name of the protocol used + antibody_details=NULL: varchar(255) # Optional: specific antibody info (e.g. company, cat#, lot#) + stain_notes="": varchar(1024) # Optional notes about this specific stain (e.g., concentration) + """ + + +@schema +class HistologyImages(SpyglassMixin, dj.Computed): + definition = """ + # Links Histology info to the Analysis NWB file containing the image data + -> Histology + images_id: varchar(32) # User-defined ID for these images (e.g., histology_id) + --- + -> AnalysisNwbfile # Link to the NWB file storing image data + processing_time=CURRENT_TIMESTAMP: timestamp # Timestamp of NWB file creation + # --- Image Acquisition/Processing Details --- + color_to_stain=NULL: blob # Mapping channel colors to stain features (e.g., {'DAPI': 'blue', 'GFAP': 'green'}) + pixel_size_x: float # (um) Pixel size in X dimension after processing/scaling + pixel_size_y: float # (um) Pixel size in Y dimension after processing/scaling + pixel_size_z: float # (um) Pixel size in Z dimension (often slice_thickness or scan step) + objective_magnification: float # Magnification of the objective lens (e.g., 20 for 20x) + image_modality: enum( # Modality of the microscopy + "fluorescence", + "brightfield", + "other" + ) + processing_notes="": varchar(1024) # Notes on image processing applied before/during NWB creation + """ + + # Ensure this key is unique for HistologyImages entries + # key_source = Histology + + def make(self, key: dict) -> None: + """ + 
Populate HistologyImages table. + This method should: + 1. Find the raw image data using `raw_scan_path` from the `Histology` table. + 2. Process the images as needed (e.g., stitching, scaling). + 3. Create an NWB file containing the processed image stack (e.g., using `pynwb.image.ImageSeries`). + - Store relevant metadata (pixel sizes, objective, modality, etc.) within the NWB file. + 4. Create an `AnalysisNwbfile` entry for the new NWB file. + 5. Insert the key, including the `analysis_file_name` from AnalysisNwbfile, + along with image metadata like `pixel_size_*`, `color_to_stain`, etc., into this table. + """ + logger.info(f"Populating HistologyImages for key: {key}") + # Placeholder: Replace with actual NWB creation and insertion logic + # Example steps (conceptual): + # 1. histology_entry = (Histology & key).fetch1() + # 2. raw_path = histology_entry['raw_scan_path'] + # 3. image_data, metadata = process_histology_images(raw_path) # Your function + # 4. nwb_file_name = f"{key['subject_id']}_{key['histology_id']}_images.nwb" + # 5. nwb_file_path = AnalysisNwbfile().create(nwb_file_name) + # 6. create_histology_nwb(nwb_file_path, image_data, metadata) # Your function + # 7. AnalysisNwbfile().add(key['subject_id'], nwb_file_name) + # 8. self.insert1({ + # **key, + # 'images_id': key['histology_id'], # Or generate a new unique ID if needed + # 'analysis_file_name': nwb_file_name, + # 'pixel_size_x': metadata['pixel_size_x'], + # # ... other metadata fields ... 
# }) + pass + + +@schema +class HistologyRegistration(SpyglassMixin, dj.Manual): + definition = """ + # Stores results/params of aligning histology image data to a target coordinate system + -> HistologyImages # Link to the source histology NWB file info + registration_id: varchar(32) # Unique ID for this registration instance/parameters + --- + -> BrainCoordinateSystem # The TARGET coordinate system (e.g., 'allen_ccf_v3_ras_um') + + # --- Registration Parameters --- + registration_method: varchar(128) # Algorithmic approach (e.g. 'affine+bspline', 'manual_landmark') + registration_software: varchar(128) # Software used (e.g. 'ANTs', 'elastix', 'SimpleITK', 'CloudCompare') + registration_software_version: varchar(64) # Software version (e.g. '2.3.5', '5.0.1') + registration_params=NULL: blob # Store detailed parameters (e.g., dict/JSON/YAML content) + + # --- Registration Results --- + transformation_matrix=NULL: blob # Store affine matrix (e.g., 4x4 np.array.tobytes()) + warp_field_path=NULL: varchar(512) # Path to non-linear warp field file (e.g., .nii.gz, .mha) + registration_quality=NULL: float # Optional QC metric (e.g., Dice score, landmark error in um) + registration_time=CURRENT_TIMESTAMP: timestamp # Time this registration entry was created/run + registration_notes="": varchar(2048) # Specific notes about this registration run + """ diff --git a/src/spyglass/micro_ct/__init__.py b/src/spyglass/micro_ct/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/spyglass/micro_ct/v1/__init__.py b/src/spyglass/micro_ct/v1/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/spyglass/micro_ct/v1/micro_ct.py b/src/spyglass/micro_ct/v1/micro_ct.py new file mode 100644 index 000000000..70665bea7 --- /dev/null +++ b/src/spyglass/micro_ct/v1/micro_ct.py @@ -0,0 +1,120 @@ +import datajoint as dj + +from spyglass.common import ( # noqa: F401 + AnalysisNwbfile, + BrainCoordinateSystem, + LabMember, + Subject, +) +from 
spyglass.utils import SpyglassMixin, logger + +schema = dj.schema("microct_v1") + + +@schema +class MicroCTScan(SpyglassMixin, dj.Manual): + definition = """ + # Metadata for a microCT scan of a subject's brain/tissue + -> Subject + scan_id: varchar(32) # User-defined ID (e.g., 'SubjX_OsO4_Scan1') + --- + # --- Preparation Details --- + stain_reagent='Aqueous 2% OsO4': varchar(128) # Staining reagent details (e.g., 'OsO4', 'I2E') + stain_duration_days=14.0: float # Duration of staining in days + embedding_resin='Durcupan ACM': varchar(128) # Resin used for embedding + prep_protocol_notes="": varchar(2048) # Notes on staining, dehydration, embedding variations + + # --- Scan Details --- + scan_date=NULL: date # Date of the scan itself + scanner_details: varchar(255) # e.g., 'Nikon Metrology X-Tek HMX ST 225 @ HCNS' + source_target_type='Molybdenum': varchar(64) # X-ray source target material + filter_material='None': varchar(64) # Filter material used (e.g., 'None', 'Copper') + filter_thickness_mm=0.0: float # (mm) Filter thickness + voltage_kv: float # (kV) Scan voltage (e.g., 100, 130) + current_ua: float # (uA) Scan current (e.g., 105, 135) + exposure_time_s: float # (s) Exposure time per projection (e.g., 1.0) + num_projections: int # Number of projections acquired (e.g., 3184) + frames_per_projection=1: int # Frames averaged per projection (e.g., 1, 4) + + # --- Reconstruction Details --- + reconstruction_software=NULL: varchar(128) # e.g., 'CT Pro (Nikon)', 'NRecon (Bruker)' + voxel_size_x: float # (um) Reconstructed voxel size X + voxel_size_y: float # (um) Reconstructed voxel size Y + voxel_size_z: float # (um) Reconstructed voxel size Z + output_format='TIFF stack': varchar(64) # Format of raw reconstructed data + + # --- Data Location & Notes --- + raw_scan_path: varchar(512) # Path to raw output (e.g., folder containing TIFF stack) + scan_notes="": varchar(2048) # General notes about the scan itself + -> [nullable] 
LabMember.proj(scanner_operator='user_name') # Optional: Who operated the scanner? + """ + + +@schema +class MicroCTImages(SpyglassMixin, dj.Computed): + definition = """ + # Links MicroCTScan info to the Analysis NWB file containing the image volume + -> MicroCTScan + images_id: varchar(32) # User-defined ID for these images (e.g., scan_id) + --- + -> AnalysisNwbfile # Link to the NWB file storing image data + processing_time=CURRENT_TIMESTAMP: timestamp # Timestamp of NWB file creation + processing_notes="": varchar(1024) # Notes on image processing applied before/during NWB creation + """ + + # Ensure this key is unique for MicroCTImages entries + # key_source = MicroCTScan + + def make(self, key: dict) -> None: + """ + Populate MicroCTImages table. + This method should: + 1. Find the raw reconstructed image data using `raw_scan_path` from `MicroCTScan`. + 2. Process the images if necessary (e.g., format conversion, cropping). + 3. Create an NWB file containing the image volume (e.g., using `pynwb.image.ImageSeries`). + - Store relevant metadata (voxel sizes from MicroCTScan, etc.) within the NWB file. + 4. Create an `AnalysisNwbfile` entry for the new NWB file. + 5. Insert the key, including the `analysis_file_name` from AnalysisNwbfile, + and any processing notes into this table. + """ + logger.info(f"Populating MicroCTImages for key: {key}") + # Placeholder: Replace with actual NWB creation and insertion logic + # Example steps (conceptual): + # 1. scan_entry = (MicroCTScan & key).fetch1() + # 2. raw_path = scan_entry['raw_scan_path'] + # 3. image_data, metadata = process_microct_images(raw_path, scan_entry) # Your function + # 4. nwb_file_name = f"{key['subject_id']}_{key['scan_id']}_images.nwb" + # 5. nwb_file_path = AnalysisNwbfile().create(nwb_file_name) + # 6. create_microct_nwb(nwb_file_path, image_data, metadata) # Your function + # 7. AnalysisNwbfile().add(key['subject_id'], nwb_file_name) + # 8. 
self.insert1({ + # **key, + # 'images_id': key['scan_id'], # Or generate a new unique ID if needed + # 'analysis_file_name': nwb_file_name, + # 'processing_notes': '...', + # }) + pass + + +@schema +class MicroCTRegistration(SpyglassMixin, dj.Manual): + definition = """ + # Stores results/params of aligning microCT image data to a target coordinate system + -> MicroCTImages # Link to the source microCT NWB file info + registration_id: varchar(32) # Unique ID for this registration instance/parameters + --- + -> BrainCoordinateSystem # The TARGET coordinate system (e.g., 'allen_ccf_v3_ras_um') + + # --- Registration Parameters --- + registration_method: varchar(128) # Algorithmic approach (e.g. 'affine+bspline', 'manual_landmark') + registration_software: varchar(128) # Software used (e.g. 'ANTs', 'elastix', 'SimpleITK', 'CloudCompare') + registration_software_version: varchar(64) # Software version (e.g. '2.3.5', '5.0.1') + registration_params=NULL: blob # Store detailed parameters (e.g., dict/JSON/YAML content) + + # --- Registration Results --- + transformation_matrix=NULL: blob # Store affine matrix (e.g., 4x4 np.array.tobytes()) + warp_field_path=NULL: varchar(512) # Path to non-linear warp field file (e.g., .nii.gz, .mha) + registration_quality=NULL: float # Optional QC metric (e.g., Dice score, landmark error in um) + registration_time=CURRENT_TIMESTAMP: timestamp # Time this registration entry was created/run + registration_notes="": varchar(2048) # Specific notes about this registration run + """