A few small bug fixes #54

Open · wants to merge 11 commits into base: main

595 changes: 595 additions & 0 deletions scripts/fafb_missing_some_synapses.ipynb

Large diffs are not rendered by default.

192 changes: 192 additions & 0 deletions tests/test_local/test_data_loading.py
@@ -3,6 +3,9 @@
"""


from vnc_networks.params import SelectionDict


class TestDataLoading:
"""
Test the data loading functions.
@@ -58,3 +61,192 @@ def test_connections_instantiation_MANCv1_2(self):

df_2 = df[(df["start_bid"] == 10725) & (df["end_bid"] == 10439)]
assert df_2["eff_weight"].values[0] == -1080, "Incorrect nt_type handling"

def test_connections_getting_neuron_ids_MANCv1_2(self):
"""
Test that getting uids or bodyids and converting between the two yields the same results.
"""
import vnc_networks
from vnc_networks.connections import Connections

# Instantiate a Connections object
connections = Connections(vnc_networks.connectome_reader.MANC("v1.2"))

# test a few different selection_dicts
selection_dicts: list[SelectionDict | None] = [
None,
{},
{"class_1": "descending"},
{"class_1": "ascending", "nt_type": "GABA"},
]
for selection_dict in selection_dicts:
body_ids = connections.get_neuron_bodyids(selection_dict)
uids = connections.get_neuron_ids(selection_dict)
assert (
set(connections.get_uids_from_bodyids(body_ids)) == set(uids)
), f"Getting bodyids and converting to uids doesn't match with selection_dict {selection_dict}."
assert (
set(connections.get_bodyids_from_uids(uids)) == set(body_ids)
), f"Getting uids and converting to bodyids doesn't match with selection_dict {selection_dict}."

def test_getting_counts_by_neuropil_MANCv1_2(self):
"""
Test that we can get synapse counts by neuropil.
"""
import pandas as pd

import vnc_networks

# Instantiate a connectome reader
connectome_reader = vnc_networks.connectome_reader.MANC("v1.2")

# check that this matches what we expect
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil(
"downstream", [10000, 23458]
),
pd.DataFrame(
{
"body_id": [10000, 23458],
"CV": [703, 0],
"IntTct": [313, 0],
"LTct": [3181, 0],
"LegNp(T3)(R)": [0, 1688],
}
),
)
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil(
"upstream", [10000, 23458]
),
pd.DataFrame(
{
"body_id": [10000, 23458],
"CV": [224, 0],
"IntTct": [185, 0],
"LTct": [1462, 0],
"LegNp(T3)(R)": [0, 685],
}
),
)
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil("pre", [10000, 23458]),
pd.DataFrame(
{
"body_id": [10000, 23458],
"CV": [138, 0],
"IntTct": [73, 0],
"LTct": [752, 0],
"LegNp(T3)(R)": [0, 207],
}
),
)
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil("post", [10000, 23458]),
pd.DataFrame(
{
"body_id": [10000, 23458],
"CV": [224, 0],
"IntTct": [185, 0],
"LTct": [1462, 0],
"LegNp(T3)(R)": [0, 685],
}
),
)
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil(
"total_synapses", [10000, 23458]
),
pd.DataFrame(
{
"body_id": [10000, 23458],
"CV": [927, 0],
"IntTct": [498, 0],
"LTct": [4643, 0],
"LegNp(T3)(R)": [0, 2373],
}
),
)

def test_getting_counts_by_neuropil_FAFBv783(self):
"""
Test that we can get synapse counts by neuropil.
"""
import pandas as pd

import vnc_networks

# Instantiate a connectome reader
connectome_reader = vnc_networks.connectome_reader.FAFB_v783()

# check that this matches what we expect
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil(
"downstream", [720575940627036426, 720575940633587552]
),
pd.DataFrame(
{
"body_id": [720575940627036426, 720575940633587552],
"LOP_L": [9, 0],
"LO_L": [2, 0],
"SLP_R": [0, 2],
"SMP_R": [0, 31],
}
),
)
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil(
"upstream", [720575940627036426, 720575940633587552]
),
pd.DataFrame(
{
"body_id": [720575940627036426, 720575940633587552],
"LOP_L": [3, 0],
"LO_L": [9, 0],
"SLP_R": [0, 11],
"SMP_R": [0, 13],
}
),
)
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil(
"pre", [720575940627036426, 720575940633587552]
),
pd.DataFrame(
{
"body_id": [720575940627036426, 720575940633587552],
"LOP_L": [14, 0],
"LO_L": [83, 0],
"SLP_R": [0, 33],
"SMP_R": [0, 75],
}
),
)
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil(
"post", [720575940627036426, 720575940633587552]
),
pd.DataFrame(
{
"body_id": [720575940627036426, 720575940633587552],
"LOP_L": [100, 0],
"LO_L": [8, 0],
"SLP_R": [0, 4],
"SMP_R": [0, 284],
}
),
)
pd.testing.assert_frame_equal(
connectome_reader.get_synapse_counts_by_neuropil(
"total_synapses", [720575940627036426, 720575940633587552]
),
pd.DataFrame(
{
"body_id": [720575940627036426, 720575940633587552],
"LOP_L": [114, 0],
"LO_L": [91, 0],
"SLP_R": [0, 37],
"SMP_R": [0, 359],
}
),
)
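
The counts-by-neuropil tests above all exercise the same call shape: connectome_reader.get_synapse_counts_by_neuropil(<count type>, <body ids>) is expected to return a DataFrame with a body_id column followed by one column per neuropil. A minimal usage sketch of that pattern, assuming the MANC v1.2 dataset used in the tests is available locally:

import pandas as pd

import vnc_networks

# Assumes the MANC v1.2 dataset is available locally, as in the tests above.
connectome_reader = vnc_networks.connectome_reader.MANC("v1.2")

# One row per requested body id; each neuropil column holds that neuron's count.
counts = connectome_reader.get_synapse_counts_by_neuropil("downstream", [10000, 23458])
assert isinstance(counts, pd.DataFrame)
assert list(counts["body_id"]) == [10000, 23458]
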
5 changes: 3 additions & 2 deletions vnc_networks/connections.py
@@ -977,8 +977,9 @@ def get_neuron_ids(
Get the neuron IDs from the nodes dataframe as loaded in the initial
dataset, based on a selection dictionary.
"""
-        nodes = self.get_nodes(type="body_id")
-        body_ids = self.CR.get_neuron_bodyids(selection_dict, nodes)
+        body_ids = self.get_nodes(type="body_id")
+        if selection_dict is not None:
+            body_ids = self.CR.get_neuron_bodyids(selection_dict, body_ids)
return self.__get_uids_from_bodyids(body_ids)

def get_neurons_pre(self):
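
For context on the connections.py change above: get_neuron_ids previously always delegated filtering to CR.get_neuron_bodyids, even when no selection dictionary was given; it now only filters when selection_dict is not None. A minimal sketch of the calling pattern the new test exercises, assuming the MANC v1.2 dataset is available locally:

import vnc_networks
from vnc_networks.connections import Connections

# Assumes the MANC v1.2 dataset is available locally, as in the tests above.
connections = Connections(vnc_networks.connectome_reader.MANC("v1.2"))

# With no selection dict all neurons are returned; a selection dict restricts
# the result to the matching subset.
all_uids = connections.get_neuron_ids(None)
descending_uids = connections.get_neuron_ids({"class_1": "descending"})
assert set(descending_uids) <= set(all_uids)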