neuprint

Fetch neuron meshes as navis.MeshNeuron.

Requires additional packages depending on the mesh source.

For DVID you need dvid-tools:

``` shell
pip3 install dvidtools
```

For everything else you need cloudvolume:

``` shell
pip3 install cloud-volume
```
PARAMETER DESCRIPTION
x
        Body ID(s). Multiple IDs can be provided as list-like or
        DataFrame with "bodyId" or "bodyid" column.

TYPE: str | int | list-like | pandas.DataFrame | SegmentCriteria

lod
        Level of detail. Higher `lod` = coarser. Ignored if mesh
        source does not support LODs (e.g. for DVID).

TYPE: int DEFAULT: 1

with_synapses
        If True will download and attach synapses as `.connectors`.

TYPE: bool DEFAULT: False

missing_mesh
        What to do if no mesh is found for a given body ID:

            "raise" (default) will raise an exception
            "warn" will throw a warning but continue
            "skip" will skip without any message

TYPE: 'raise' | 'warn' | 'skip' DEFAULT: 'raise'

parallel
        If True, will use parallel threads to fetch data.

TYPE: bool DEFAULT: True

max_threads
        Max number of parallel threads to use.

TYPE: int DEFAULT: 5

seg_source
        Use this to override the segmentation source specified by
        neuPrint.

TYPE: str | cloudvolume.CloudVolume DEFAULT: None

client
        If `None` will try using global client.

TYPE: neuprint.Client DEFAULT: None

**kwargs
        Will be passed to `cloudvolume.CloudVolume`.

DEFAULT: {}

RETURNS DESCRIPTION
navis.NeuronList

Containing navis.MeshNeuron. Note that meshes are resized to raw voxel size to match other spatial data from neuprint (synapses, skeletons, etc).
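
A minimal usage sketch (server, dataset, token and body IDs below are placeholders; substitute your own):

``` python
# Hypothetical example -- requires a neuPrint account and API token.
from neuprint import Client
import navis.interfaces.neuprint as neu

# Creating a client typically registers it as the default (global) client
client = Client('https://neuprint.janelia.org',
                dataset='hemibrain:v1.2.1',
                token='<YOUR_TOKEN>')

# Fetch two example body IDs at a coarser level of detail
meshes = neu.fetch_mesh_neuron([1734350788, 1734350908],
                               lod=2,
                               with_synapses=True,
                               client=client)
print(meshes)  # NeuronList of MeshNeurons, coordinates in raw voxels
```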

Source code in navis/interfaces/neuprint.py
@inject_client
def fetch_mesh_neuron(x, *, lod=1, with_synapses=False, missing_mesh='raise',
                      parallel=True, max_threads=5, seg_source=None,
                      client=None, **kwargs):
    """Fetch neuron meshes as navis.MeshNeuron.

    Requires additional packages depending on the mesh source.

    For DVID you need [`dvid-tools`](https://github.com/flyconnectome/dvid_tools):

        ``` shell
        pip3 install dvidtools
        ```

    For everything else you need [cloudvolume](https://github.com/seung-lab/cloud-volume):

        ``` shell
        pip3 install cloud-volume
        ```


    Parameters
    ----------
    x :             str | int | list-like | pandas.DataFrame | SegmentCriteria
                    Body ID(s). Multiple IDs can be provided as list-like or
                    DataFrame with "bodyId" or "bodyid" column.
    lod :           int
                    Level of detail. Higher `lod` = coarser. Ignored if mesh
                    source does not support LODs (e.g. for DVID).
    with_synapses : bool, optional
                    If True will download and attach synapses as `.connectors`.
    missing_mesh :  'raise' | 'warn' | 'skip'
                    What to do if no mesh is found for a given body ID:

                        "raise" (default) will raise an exception
                        "warn" will throw a warning but continue
                        "skip" will skip without any message

    parallel :      bool
                    If True, will use parallel threads to fetch data.
    max_threads :   int
                    Max number of parallel threads to use.
    seg_source :    str | cloudvolume.CloudVolume, optional
                    Use this to override the segmentation source specified by
                    neuPrint.
    client :        neuprint.Client, optional
                    If `None` will try using global client.
    **kwargs
                    Will be passed to `cloudvolume.CloudVolume`.

    Returns
    -------
    navis.NeuronList
                    Containing [`navis.MeshNeuron`][]. Note that meshes are
                    resized to raw voxel size to match other spatial data from
                    neuprint (synapses, skeletons, etc).

    """
    if isinstance(x, pd.DataFrame):
        if 'bodyId' in x.columns:
            x = x['bodyId'].values
        elif 'bodyid' in x.columns:
            x = x['bodyid'].values
        else:
            raise ValueError('DataFrame must have a "bodyId" or "bodyid" column.')

    # Extract source
    if not seg_source:
        seg_source = get_seg_source(client=client)

    if not seg_source:
        raise ValueError('Segmentation source could not be automatically '
                         'determined. Please provide via `seg_source`.')

    if isinstance(seg_source, str) and seg_source.startswith('dvid'):
        try:
            import dvid as dv
        except ModuleNotFoundError:
            raise ModuleNotFoundError(
                'This looks like a DVID mesh source. For this we '
                'need the `dvid-tools` library:\n'
                '  pip3 install dvidtools -U')
        o = urlparse(seg_source.replace('dvid://', ''))
        server = f'{o.scheme}://{o.netloc}'
        node = o.path.split('/')[1]

        if lod is not None:
            logger.warning(
                'This dataset does not support LODs. '
                'Will ignore the `lod` argument. '
                'You can silence this warning by setting `lod=None`.')
            lod = None
    else:
        try:
            from cloudvolume import CloudVolume
        except ModuleNotFoundError:
            raise ModuleNotFoundError(
                "You need to install the `cloudvolume` library "
                "to fetch meshes from this mesh source:\n"
                "  pip3 install cloud-volume -U")
        # Initialize volume
        if isinstance(seg_source, CloudVolume):
            vol = seg_source
        else:
            defaults = dict(use_https=True, progress=False)
            defaults.update(kwargs)
            vol = CloudVolume(seg_source, **defaults)

            # Check if vol.mesh.get has a lod argument
            if lod is not None and 'lod' not in vol.mesh.get.__code__.co_varnames:
                logger.warning(
                    'This dataset does not have multi-resolution meshes and '
                    'the `lod` parameter will be ignored. '
                    'You can silence this warning by setting `lod=None`.')
                lod = None

    if isinstance(x, NeuronCriteria):
        query = x
        wanted_ids = None
    else:
        query = NeuronCriteria(bodyId=x, client=client)
        wanted_ids = utils.make_iterable(x)

    # Fetch names, etc
    meta, roi_info = fetch_neurons(query, client=client)

    if meta.empty:
        raise ValueError('No neurons matching the given criteria found!')
    elif wanted_ids is not None:
        miss = wanted_ids[~np.isin(wanted_ids, meta.bodyId.values)]
        if len(miss):
            logger.warning(f'Skipping {len(miss)} body IDs that were not found: '
                           f'{", ".join(miss.astype(str))}')

    # Make sure there is a somaLocation and somaRadius column
    if 'somaLocation' not in meta.columns:
        meta['somaLocation'] = None
    if 'somaRadius' not in meta.columns:
        meta['somaRadius'] = None

    if isinstance(seg_source, str) and seg_source.startswith('dvid'):
        # Fetch the meshes
        nl = dv.get_meshes(meta.bodyId.values,
                           on_error=missing_mesh,
                           output='navis',
                           progress=meta.shape[0] > 1 and not config.pbar_hide,
                           max_threads=1 if not parallel else max_threads,
                           server=server,
                           node=node)
    else:
        nl = []
        with ThreadPoolExecutor(max_workers=1 if not parallel else max_threads) as executor:
            futures = {}
            for r in meta.itertuples():
                f = executor.submit(__fetch_mesh,
                                    r.bodyId,
                                    vol=vol,
                                    lod=lod,
                                    missing_mesh=missing_mesh)
                futures[f] = r.bodyId

            with config.tqdm(desc='Fetching',
                             total=len(futures),
                             leave=config.pbar_leave,
                             disable=meta.shape[0] == 1 or config.pbar_hide) as pbar:
                for f in as_completed(futures):
                    bodyId = futures[f]
                    pbar.update(1)
                    try:
                        nl.append(f.result())
                    except Exception as exc:
                        print(f'{bodyId} generated an exception:', exc)

    nl = NeuronList(nl)

    # Add meta data
    instances = meta.set_index('bodyId').instance.to_dict()
    sizes = meta.set_index('bodyId')['size'].to_dict()
    status = meta.set_index('bodyId').status.to_dict()
    statuslabel = meta.set_index('bodyId').statusLabel.to_dict()
    somalocs = meta.set_index('bodyId').somaLocation.to_dict()
    radii = meta.set_index('bodyId').somaRadius.to_dict()

    for n in nl:
        n.name = instances[n.id]
        n.status = status[n.id]
        n.statusLabel = statuslabel[n.id]
        n.n_voxels = sizes[n.id]
        n.somaLocation = somalocs[n.id]

        # Meshes come out in physical units (e.g. nanometers) but most other data
        # (synapses, skeletons, etc.) come out in voxels; we therefore scale meshes to voxels
        n.vertices /= np.array(client.meta['voxelSize']).reshape(1, 3)
        n.units = f'{client.meta["voxelSize"][0]} {client.meta["voxelUnits"]}'

        if n.somaLocation:
            if radii[n.id]:
                n.soma_radius = radii[n.id] / n.units.to('nm').magnitude
            else:
                n.soma_radius = None
            n.soma_pos = n.somaLocation

    if with_synapses:
        # Fetch synapses
        syn = fetch_synapses(meta.bodyId.values,
                             synapse_criteria=SynapseCriteria(primary_only=True, client=client),
                             client=client)

        for n in nl:
            this_syn = syn[syn.bodyId == n.id]
            if not this_syn.empty:
                # Keep only relevant columns
                n.connectors = this_syn[['type', 'x', 'y', 'z', 'roi', 'confidence']]

    # Make an effort to retain the original order
    if not isinstance(x, NeuronCriteria) and not nl.empty:
        nl = nl.idx[np.asarray(x)[np.isin(x, nl.id)]]

    return nl

Fetch given ROI.

PARAMETER DESCRIPTION
roi
        Name of an ROI.

TYPE: str

client
        If `None` will try using global client.

TYPE: neuprint.Client DEFAULT: None

RETURNS DESCRIPTION
navis.Volume
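
A minimal usage sketch (the ROI name is an example from the hemibrain dataset; ROI names are dataset-specific):

``` python
# Hypothetical example -- assumes a neuprint client has already been set up.
import navis
import navis.interfaces.neuprint as neu

mb = neu.fetch_roi('MB(R)')   # example ROI name; use one from your dataset
print(type(mb))               # navis.Volume

# Volumes can be plotted alongside neurons like any other navis object
navis.plot3d([mb])
```
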
Source code in navis/interfaces/neuprint.py
@inject_client
def fetch_roi(roi, *, client=None):
    """Fetch given ROI.

    Parameters
    ----------
    roi :           str
                    Name of an ROI.
    client :        neuprint.Client, optional
                    If `None` will try using global client.

    Returns
    -------
    navis.Volume

    """
    if not isinstance(roi, str):
        raise TypeError(f'Expected ROI name as string, got "{type(roi)}"')

    # Fetch data
    data = client.fetch_roi_mesh(roi, export_path=None)

    # Turn into file-like object
    f = io.StringIO(data.decode())

    # Parse with trimesh
    ob = trimesh.load_mesh(f, file_type='obj')

    return Volume.from_object(ob, name=roi)

Fetch neuron skeletons as navis.TreeNeurons.

Notes

Synapses will be attached to the closest node in the skeleton.

PARAMETER DESCRIPTION
x
        Body ID(s). Multiple IDs can be provided as list-like or
        DataFrame with "bodyId" or "bodyid" column.

TYPE: str | int | list-like | pandas.DataFrame | SegmentCriteria

with_synapses
        If True will also attach synapses as `.connectors`.

TYPE: bool DEFAULT: False

heal
        If True, will automatically heal fragmented skeletons using
        neuprint-python's `heal_skeleton` function. Pass a float
        or an int to limit the max distance at which nodes are
        allowed to be re-connected (requires neuprint-python >= 0.4.11).

TYPE: bool | int | float DEFAULT: False

missing_swc
        What to do if no skeleton is found for a given body ID:

          - "raise" (default) will raise an exception
          - "warn" will throw a warning but continue
          - "skip" will skip without any message

TYPE: 'raise' | 'warn' | 'skip' DEFAULT: 'raise'

parallel
        If True, will use parallel threads to fetch data.

TYPE: bool DEFAULT: True

max_threads
        Max number of parallel threads to use.

TYPE: int DEFAULT: 5

client
        If `None` will try using global client.

TYPE: neuprint.Client DEFAULT: None

RETURNS DESCRIPTION
navis.NeuronList
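
A minimal usage sketch (the cell type is an example; assumes a neuprint client has already been set up):

``` python
# Hypothetical example: fetch healed skeletons with synapses for all
# neurons matching a NeuronCriteria query.
import navis.interfaces.neuprint as neu
from neuprint import NeuronCriteria

criteria = NeuronCriteria(type='MBON01')   # example type; adjust to your dataset
skels = neu.fetch_skeletons(criteria,
                            heal=True,
                            with_synapses=True,
                            max_threads=10)
print(skels)                        # navis.NeuronList of TreeNeurons
print(skels[0].connectors.head())   # synapses mapped to the closest nodes
```
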
Source code in navis/interfaces/neuprint.py
@inject_client
def fetch_skeletons(x, *, with_synapses=False, heal=False, missing_swc='raise',
                    parallel=True, max_threads=5, client=None):
    """Fetch neuron skeletons as navis.TreeNeurons.

    Notes
    -----
    Synapses will be attached to the closest node in the skeleton.

    Parameters
    ----------
    x :             str | int | list-like | pandas.DataFrame | SegmentCriteria
                    Body ID(s). Multiple IDs can be provided as list-like or
                    DataFrame with "bodyId" or "bodyid" column.
    with_synapses : bool, optional
                    If True will also attach synapses as `.connectors`.
    heal :          bool | int | float, optional
                    If True, will automatically heal fragmented skeletons using
                    neuprint-python's `heal_skeleton` function. Pass a float
                    or an int to limit the max distance at which nodes are
                    allowed to be re-connected (requires neuprint-python >= 0.4.11).
    missing_swc :   'raise' | 'warn' | 'skip'
                    What to do if no skeleton is found for a given body ID:

                      - "raise" (default) will raise an exception
                      - "warn" will throw a warning but continue
                      - "skip" will skip without any message

    parallel :      bool
                    If True, will use parallel threads to fetch data.
    max_threads :   int
                    Max number of parallel threads to use.
    client :        neuprint.Client, optional
                    If `None` will try using global client.

    Returns
    -------
    navis.NeuronList

    """
    if isinstance(x, pd.DataFrame):
        if 'bodyId' in x.columns:
            x = x['bodyId'].values
        elif 'bodyid' in x.columns:
            x = x['bodyid'].values
        else:
            raise ValueError('DataFrame must have a "bodyId" or "bodyid" column.')

    if isinstance(x, NeuronCriteria):
        query = x
        wanted_ids = None
    else:
        query = NeuronCriteria(bodyId=x, client=client)
        wanted_ids = utils.make_iterable(x)

    # Fetch names, etc
    meta, roi_info = fetch_neurons(query, client=client)

    if meta.empty:
        raise ValueError('No neurons matching the given criteria found!')
    elif wanted_ids is not None:
        miss = wanted_ids[~np.isin(wanted_ids, meta.bodyId.values)]
        if len(miss):
            logger.warning(f'Skipping {len(miss)} body IDs that were not found: '
                           f'{", ".join(miss.astype(str))}')

    # Make sure there is a somaLocation and somaRadius column
    if 'somaLocation' not in meta.columns:
        meta['somaLocation'] = None
    if 'somaRadius' not in meta.columns:
        meta['somaRadius'] = None

    nl = []
    with ThreadPoolExecutor(max_workers=1 if not parallel else max_threads) as executor:
        futures = {}
        for r in meta.itertuples():
            f = executor.submit(__fetch_skeleton,
                                r,
                                client=client,
                                with_synapses=with_synapses,
                                missing_swc=missing_swc,
                                heal=heal)
            futures[f] = r.bodyId

        with config.tqdm(desc='Fetching',
                         total=meta.shape[0],
                         leave=config.pbar_leave,
                         disable=meta.shape[0] == 1 or config.pbar_hide) as pbar:
            for f in as_completed(futures):
                bodyId = futures[f]
                pbar.update(1)
                try:
                    nl.append(f.result())
                except Exception as exc:
                    print(f'{bodyId} generated an exception:', exc)

    nl = NeuronList(nl)

    # Make an effort to retain the original order
    if not isinstance(x, NeuronCriteria) and not nl.empty:
        nl = nl.idx[np.asarray(x)[np.isin(x, nl.id)]]

    return nl

Get segmentation source for given client+dataset.
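
A minimal usage sketch (assumes a neuprint client has already been set up; the body ID is an example):

``` python
# Hypothetical example: inspect the auto-detected segmentation source
# and hand it to fetch_mesh_neuron explicitly.
import navis.interfaces.neuprint as neu

src = neu.get_seg_source()   # uses the default/global client
print(src)                   # e.g. a 'precomputed://...' or 'dvid://...' URL

# The same string (or a pre-configured cloudvolume.CloudVolume) can be
# passed to fetch_mesh_neuron via `seg_source` to bypass auto-detection.
meshes = neu.fetch_mesh_neuron(1734350788, seg_source=src)
```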

Source code in navis/interfaces/neuprint.py
@inject_client
def get_seg_source(*, client=None):
    """Get segmentation source for given client+dataset."""
    # First try to fetch the scene for the neuroglancer
    url = f'{client.server}/api/npexplorer/nglayers/{client.dataset}.json'

    r = client.session.get(url)
    try:
        r.raise_for_status()
        scene = r.json()
        segs = [s for s in scene['layers'] if s.get('type') == 'segmentation']
    except BaseException:
        segs = []

    # If we didn't find a `dataset.json`, will check the client's meta data for a seg source
    if not segs:
        segs = [s for s in client.meta['neuroglancerMeta'] if s.get('dataType') == 'segmentation']

    if not len(segs):
        return None

    # Check if any segmentation source matches our dataset exactly
    named_segs = [s for s in segs if s.get('name') == client.dataset]
    if len(named_segs):
        segs = named_segs

    # If there are multiple segmentation layers, select the first entry
    seg_source = segs[0]['source']

    # If there are multiple segmentation sources for
    # the layer we picked, select the first source.
    if isinstance(seg_source, list):
        seg_source = seg_source[0]

    # If it's a dict like {'source': url, 'subsources'...},
    # select the url.
    if isinstance(seg_source, dict):
        seg_source = seg_source['url']

    if not isinstance(seg_source, str):
        e = f"Could not understand segmentation source: {seg_source}"
        raise RuntimeError(e)

    if len(segs) > 1:
        logger.warning(f'{len(segs)} segmentation sources found. Using the '
                       f'first entry: "{seg_source}"')

    return seg_source

Remove hairball around soma.

PARAMETER DESCRIPTION
x
    Neuron to prune.

TYPE: core.TreeNeuron

radius
    Radius around the soma within which to look for the hairball.

TYPE: float DEFAULT: 500

inplace
    If True, modify the neuron in place; if False (default), operate on
    and return a copy.

TYPE: bool DEFAULT: False

RETURNS DESCRIPTION
TreeNeuron

If inplace=False.
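
A minimal usage sketch (the body ID is an example; the radius is in the neuron's units, i.e. raw voxels for neuprint skeletons):

``` python
# Hypothetical example: prune the dense tangle of twigs around the soma
# of a freshly fetched skeleton.
import navis.interfaces.neuprint as neu

n = neu.fetch_skeletons(1734350788)[0]   # example body ID
cleaned = neu.remove_soma_hairball(n, radius=500, inplace=False)
```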

Source code in navis/interfaces/neuprint.py
def remove_soma_hairball(x: 'core.TreeNeuron',
                         radius: float = 500,
                         inplace: bool = False):
    """Remove hairball around soma.

    Parameters
    ----------
    x :         core.TreeNeuron
                Neuron to prune.
    radius :    float
                Radius around the soma within which to look for the hairball.
    inplace :   bool
                If True, modify the neuron in place; if False (default),
                operate on and return a copy.

    Returns
    -------
    TreeNeuron
                If inplace=False.
    """
    if not inplace:
        x = x.copy()
    if not x.soma:
        if not inplace:
            return x
        return
    # Get all nodes within given radius of soma nodes
    soma_loc = x.nodes.set_index('node_id').loc[[x.soma],
                                                ['x', 'y', 'z']].values
    tree = neuron2KDTree(x)
    dist, ix = tree.query(soma_loc, k=x.n_nodes, distance_upper_bound=radius)

    # Subset to nodes within range
    to_check = set(list(ix[0, dist[0, :] <= radius]))

    # Get the segments that have nodes in the soma
    segs = [seg for seg in x.segments if set(seg) & to_check]

    # Unless these segments end in a root node, we will keep the last node
    # (which will be a branch point)
    segs = [s[:-1] if s[-1] not in x.root else s for s in segs]

    # This is already sorted by length -> we will keep the first (i.e. longest)
    # segment and remove the rest
    to_remove = [n for s in segs[1:] for n in s]

    to_keep = x.nodes.loc[~x.nodes.node_id.isin(to_remove), 'node_id'].values

    # Move soma if required
    if x.soma in to_remove:
        x.soma = list(to_check & set(to_keep))[0]

    subset_neuron(x, to_keep, inplace=True)

    if not inplace:
        return x