diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml new file mode 100644 index 000000000..7373affc3 --- /dev/null +++ b/.github/workflows/codespell.yml @@ -0,0 +1,22 @@ +--- +name: Codespell + +on: + push: + branches: [master] + pull_request: + branches: [master] + +permissions: + contents: read + +jobs: + codespell: + name: Check for spelling errors + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Codespell + uses: codespell-project/actions-codespell@v2 diff --git a/doc/conf.py b/doc/conf.py index a3af076f1..98758abf5 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -118,7 +118,7 @@ ("GUI", "gui/index"), ("API", "api"), ("Glossary", "glossary"), - ("Whats new", "whats_new"), + ("What's new", "whats_new"), ("GitHub", "https://github.com/jonescompneurolab/hnn-core", True) ], 'bootswatch_theme': "yeti" diff --git a/doc/gui/tutorial_erp.ipynb b/doc/gui/tutorial_erp.ipynb index 89f1837c6..04359d5ee 100644 --- a/doc/gui/tutorial_erp.ipynb +++ b/doc/gui/tutorial_erp.ipynb @@ -210,7 +210,7 @@ "id": "0030deba", "metadata": {}, "source": [ - "You can also view all cell connectivity paramters, i.e., weight and connectivity probability, in the `Cell connectivity` tab:" + "You can also view all cell connectivity parameters, i.e., weight and connectivity probability, in the `Cell connectivity` tab:" ] }, { @@ -291,7 +291,7 @@ "id": "6c7c3da2", "metadata": {}, "source": [ - "After simulation, you can immediately find simulation results (dipoles) at the right side of the GUI. In the figures below, the thin gray traces are dipole signals from individual trials while the green trace is the average ERP. In the left panel, the aggregated dipole data is plotted while in the right panel layer-specific dipole data are also shwon so you can check the contribution per layer." + "After simulation, you can immediately find simulation results (dipoles) at the right side of the GUI. In the figures below, the thin gray traces are dipole signals from individual trials while the green trace is the average ERP. In the left panel, the aggregated dipole data is plotted while in the right panel layer-specific dipole data are also shown so you can check the contribution per layer." ] }, { diff --git a/doc/roadmap.rst b/doc/roadmap.rst index 38683d740..49f94b3eb 100644 --- a/doc/roadmap.rst +++ b/doc/roadmap.rst @@ -37,7 +37,7 @@ Timeline Overview This roadmap timeline outlines the major short-term and longer-term goals for HNNs. The short term goals will entail a substantial reorganization of the HNN code and creation of an API to facilitate HNN expansions, community contribution, -and integration with other relevant open-source platforms (e.g. MNE-Python, NetPyNE). To this end, in March 2021, we released the first version of the HNN-core repository. HNN-core contains improved versions of HNN’s non-GUI components following best practices in open-source software design, with unit testing and continuous integration, along with initial API and documentation for command-line coding. We will adopt similar best practices to develop a new HNN-GUI and several new HNN features, including the ability to simulate and visualize LFP/CSD and to use improved parameter estimation procedures. Our process will be to develop all new features in HNN-core, with API and examples of use followed, when applicable, by integration into the HNN-GUI with correspoding GUI-based tutorials on our website. 
Longer-term goals include integration with the related modeling software MNE-Python and NetPyNe, the development of a web-based interface with ability for simultaneous GUI and Command Line Interface (CLI), and extension to multi-area simulations. +and integration with other relevant open-source platforms (e.g. MNE-Python, NetPyNE). To this end, in March 2021, we released the first version of the HNN-core repository. HNN-core contains improved versions of HNN’s non-GUI components following best practices in open-source software design, with unit testing and continuous integration, along with initial API and documentation for command-line coding. We will adopt similar best practices to develop a new HNN-GUI and several new HNN features, including the ability to simulate and visualize LFP/CSD and to use improved parameter estimation procedures. Our process will be to develop all new features in HNN-core, with API and examples of use followed, when applicable, by integration into the HNN-GUI with corresponding GUI-based tutorials on our website. Longer-term goals include integration with the related modeling software MNE-Python and NetPyNe, the development of a web-based interface with ability for simultaneous GUI and Command Line Interface (CLI), and extension to multi-area simulations. Short-Term Goals -------------------------- @@ -54,7 +54,7 @@ This reorganization will entail continued improvements within the HNN-core repos - Following best practices in open-source software design, including continuous integration testing, to develop HNN-core. HNN-core will contain clean and reorganized code, and separate all components that - interact directly with the NEURON simulator (e.g. cell and network intantiation, external drives, etc..), + interact directly with the NEURON simulator (e.g. cell and network instantiation, external drives, etc..), from those that pertain to post-processing data analysis and plotting functions (e.g. spectra lanalysis). **COMPLETED FEB 2021** - Convert installation procedures to PIP. **COMPLETED FEB 2021** @@ -98,7 +98,7 @@ domain over which the predictions will be tested is local field potential (LFP) across the cortical layers and the associated current source density (CSD) profiles. We will develop a method to simulate and visualize LFP/CSD across the cortical layers and to statistically compare model simulations to recorded data. These components will -be developed in HNN-core, with correponding API and examples of use, followed by integration +be developed in HNN-core, with corresponding API and examples of use, followed by integration into the HNN-GUI, with corresponding GUI based tutorials on the HNN website, in the following steps: - Develop code in HNN-core to simulate and visualize LFP/CSD from cellular @@ -150,7 +150,7 @@ API and Tutorial development The ability to interpret the neural origin of macroscale MEG/EEG signals in a complex high-dimensional non-linear computational neural model is challenging. A primary goal of HNN is to facilitate this interpretation with a clear API and examples -of use in HNN-core, and interative GUI-based tutorals for all HNN-GUI functionality on our HNN website. +of use in HNN-core, and interactive GUI-based tutorals for all HNN-GUI functionality on our HNN website. 
Following the process for creating new featuers in HNN, the process for documenting new features will be to first develop them with API and examples of use in HNN-core, followed by integration into the HNN-GUI, with corresponding GUI-based tutorials on the HNN-website. diff --git a/doc/whats_new.rst b/doc/whats_new.rst index 2567c3005..7144555e1 100644 --- a/doc/whats_new.rst +++ b/doc/whats_new.rst @@ -240,7 +240,7 @@ Changelog - Add method for setting in-plane cell distances and layer separation in the network :func:`~hnn_core.Network.set_cell_positions`, by `Christopher Bailey`_ in `#370 `_ -- External drives API now accepts probability argument for targetting subsets of cells, +- External drives API now accepts probability argument for targeting subsets of cells, by `Nick Tolley`_ in :gh:`416` Bug @@ -275,7 +275,7 @@ API :func:`~hnn_core.Network.add_connection`, by `Nick Tolley`_ in :gh:`276` - Remove :class:`~hnn_core.L2Pyr`, :class:`~hnn_core.L5Pyr`, :class:`~hnn_core.L2Basket`, - and :class:`~hnn_core.L5Basket` classes in favor of instantation through functions and + and :class:`~hnn_core.L5Basket` classes in favor of instantiation through functions and a more consistent :class:`~hnn_core.Cell` class by `Mainak Jas`_ in :gh:`322` - Remove parameter ``distribution`` in :func:`~hnn_core.Network.add_bursty_drive`. diff --git a/examples/howto/optimize_evoked.py b/examples/howto/optimize_evoked.py index 94097b861..c9b4a7217 100644 --- a/examples/howto/optimize_evoked.py +++ b/examples/howto/optimize_evoked.py @@ -107,7 +107,7 @@ # # First, we define a function that will tell the optimization routine how to # modify the network drive parameters. The function will take in the Network -# object with no attached drives, and a dictionary of the paramters we wish to +# object with no attached drives, and a dictionary of the parameters we wish to # optimize. diff --git a/examples/howto/plot_record_extracellular_potentials.py b/examples/howto/plot_record_extracellular_potentials.py index 7c4a5212c..57916d227 100644 --- a/examples/howto/plot_record_extracellular_potentials.py +++ b/examples/howto/plot_record_extracellular_potentials.py @@ -39,7 +39,7 @@ net.set_cell_positions(inplane_distance=30.) ############################################################################### -# Extracellular recordings require specifying the electrode postions. It can be +# Extracellular recordings require specifying the electrode positions. It can be # useful to visualize the cells of the network to decide on the placement of # each electrode. net.plot_cells() diff --git a/examples/workflows/plot_simulate_beta.py b/examples/workflows/plot_simulate_beta.py index 2629a6bcd..8452c50e7 100644 --- a/examples/workflows/plot_simulate_beta.py +++ b/examples/workflows/plot_simulate_beta.py @@ -6,7 +6,7 @@ This example demonstrates how event related potentials (ERP) are modulated by prestimulus beta events. Specifically, this example reproduces Figure 5 from Law et al. 2021 [1]_. To be consistent with the publication, the default -network connectivity is altered. These modfications demonstrate a potential +network connectivity is altered. These modifications demonstrate a potential mechanism by which transient beta activity in the neocortex can suppress the perceptibility of sensory input. This suppression depends on the timing of the beta event, and the incoming sensory information. 
@@ -171,7 +171,7 @@ def add_beta_drives(net, beta_start): # occurs exclusively at 50 ms, the peak of the gaussian distributed proximal # and distal inputs. This spiking activity leads to sustained GABAb mediated # inhibition of the L2 and L5 pyrmaidal cells. One effect of this inhibition -# is an assymetric beta event with a long positive tail. +# is an asymmetric beta event with a long positive tail. import matplotlib.pyplot as plt import numpy as np fig, axes = plt.subplots(4, 1, sharex=True, figsize=(7, 7), diff --git a/hnn_core/cell.py b/hnn_core/cell.py index 26a956156..d5798ce10 100644 --- a/hnn_core/cell.py +++ b/hnn_core/cell.py @@ -268,7 +268,7 @@ class Cell: Stores the tree representation of a cell. Root is the 0 end of 'soma'. Nodes are a tuple (sec_name, node_pos) where sec_name is the name of the section and node_pos is the 0 end - or 1 end. The data structure is the adjacency list represetation of a + or 1 end. The data structure is the adjacency list representation of a tree. The keys of the dict are the parent nodes. The value is the list of nodes (children nodes) connected to the parent node. @@ -306,7 +306,7 @@ class Cell: Stores the tree representation of a cell. Root is the 0 end of 'soma'. Nodes are a tuple (sec_name, node_pos) where sec_name is the name of the section and node_pos is the 0 end - or 1 end. The data structure is the adjacency list represetation of a + or 1 end. The data structure is the adjacency list representation of a tree. The keys of the dict are the parent nodes. The value is the list of nodes (children nodes) connected to the parent node. diff --git a/hnn_core/cell_response.py b/hnn_core/cell_response.py index e36fe3b36..fbf155e0c 100644 --- a/hnn_core/cell_response.py +++ b/hnn_core/cell_response.py @@ -27,7 +27,7 @@ class CellResponse(object): spike_types : list (n_trials,) of list (n_spikes,) of float, shape | None Each element of the outer list is a trial. The inner list contains the type of spike (e.g., evprox1 - or L2_pyramidal) that occured at the corresonding time stamp. + or L2_pyramidal) that occurred at the corresponding time stamp. Each gid corresponds to a type via Network().gid_ranges. times : numpy array | None Array of time points for samples in continuous data. @@ -48,7 +48,7 @@ class CellResponse(object): spike_types : list (n_trials,) of list (n_spikes,) of float, shape Each element of the outer list is a trial. The inner list contains the type of spike (e.g., evprox1 - or L2_pyramidal) that occured at the corresonding time stamp. + or L2_pyramidal) that occurred at the corresponding time stamp. Each gid corresponds to a type via Network::gid_ranges. vsec : list (n_trials,) of dict, shape Each element of the outer list is a trial. diff --git a/hnn_core/cells_default.py b/hnn_core/cells_default.py index 5447a40bd..86f931162 100644 --- a/hnn_core/cells_default.py +++ b/hnn_core/cells_default.py @@ -312,7 +312,7 @@ def _exp_g_at_dist(x, zero_val, exp_term, offset): zero_val : float | int Value of function when x = 0 exp_term : float | int - Mutiplier of x in the exponent + Multiplier of x in the exponent offset: float |int Offset value added to output diff --git a/hnn_core/externals/bayesopt.py b/hnn_core/externals/bayesopt.py index a55777e5e..318bd38a3 100644 --- a/hnn_core/externals/bayesopt.py +++ b/hnn_core/externals/bayesopt.py @@ -58,7 +58,7 @@ def bayes_opt(func, x0, cons, acquisition, maxfun=200, cons : list of tuples Parameter constraints in solver-specific format. 
acquisition : func - Acquisiton function we want to use to find query points. + Acquisition function we want to use to find query points. maxfun : int, optional Maximum number of function evaluations. The default is 200. debug : bool, optional diff --git a/hnn_core/extracellular.py b/hnn_core/extracellular.py index 607a07310..fb952e535 100644 --- a/hnn_core/extracellular.py +++ b/hnn_core/extracellular.py @@ -90,7 +90,7 @@ def _get_laminar_z_coords(electrode_positions): raise ValueError( 'Electrode contacts are incompatible with laminar profiling ' 'in a neocortical column. Make sure the ' - 'electrode postions are equispaced, colinear, and projecting ' + 'electrode positions are equispaced, colinear, and projecting ' 'along the z-axis.') else: return np.array(electrode_positions)[:, 2], z_delta diff --git a/hnn_core/gui/_viz_manager.py b/hnn_core/gui/_viz_manager.py index f3aaaa6fc..883a281d7 100644 --- a/hnn_core/gui/_viz_manager.py +++ b/hnn_core/gui/_viz_manager.py @@ -746,7 +746,7 @@ def _simulate_edit_figure(self, fig_name, ax_name, simulation_name, fig_name : str The figure name shown in the GUI, e.g., 'Figure 1'. ax_name : str - Axis name shwon in the left side of GUI, like, 'ax0'. + Axis name shown in the left side of GUI, like, 'ax0'. simulation_name : str The name of simulation you want to visualize plot_type : str diff --git a/hnn_core/gui/gui.py b/hnn_core/gui/gui.py index 99f503a15..54ab6255e 100644 --- a/hnn_core/gui/gui.py +++ b/hnn_core/gui/gui.py @@ -431,7 +431,7 @@ def compose(self, return_layout=True): self._connectivity_out, ]) - # accordians to group local-connectivity by cell type + # accordions to group local-connectivity by cell type connectivity_boxes = [ VBox(slider) for slider in self.connectivity_widgets] connectivity_names = ( diff --git a/hnn_core/mpi_child.py b/hnn_core/mpi_child.py index a4c64c2b9..a26ac5eae 100644 --- a/hnn_core/mpi_child.py +++ b/hnn_core/mpi_child.py @@ -133,7 +133,7 @@ def run(self, net, tstop, dt, n_trials): # only rank 0 has data that should be sent back to MPIBackend sim_data.append(single_sim_data) - # flush output buffers from all ranks (any errors or status mesages) + # flush output buffers from all ranks (any errors or status messages) sys.stdout.flush() sys.stderr.flush() diff --git a/hnn_core/network.py b/hnn_core/network.py index 8aa754de1..6885d4c01 100644 --- a/hnn_core/network.py +++ b/hnn_core/network.py @@ -114,7 +114,7 @@ def _connection_probability(conn, probability, conn_seed=None): Defaults to 1.0 producing an all-to-all pattern. conn_seed : int Optional initial seed for random number generator (default: None). - Used to randomly remove connections when probablity < 1.0. + Used to randomly remove connections when probability < 1.0. Notes ----- @@ -171,12 +171,12 @@ def pick_connection(net, src_gids=None, target_gids=None, src_gids : str | int | range | list of int | None Identifier for source cells. Passing str arguments ('L2_pyramidal', 'L2_basket', 'L5_pyramidal', 'L5_basket') is - equivalent to passing a list of gids for the relvant cell type. + equivalent to passing a list of gids for the relevant cell type. source - target connections are made in an all-to-all pattern. target_gids : str | int | range | list of int | None - Identifer for targets of source cells. Passing str arguments + Identifier for targets of source cells. Passing str arguments ('L2_pyramidal', 'L2_basket', 'L5_pyramidal', 'L5_basket') is - equivalent to passing a list of gids for the relvant cell type. 
+ equivalent to passing a list of gids for the relevant cell type. source - target connections are made in an all-to-all pattern. loc : str | list of str | None Location of synapse on target cell. Must be @@ -253,7 +253,7 @@ def pick_connection(net, src_gids=None, target_gids=None, else: receptor_dict[conn['receptor']] = [conn_idx] - # Look up conn indeces that match search terms and add to set. + # Look up conn indices that match search terms and add to set. conn_set = set() search_pairs = [(src_gids, src_dict), (target_gids, target_dict), (loc, loc_dict), (receptor, receptor_dict)] @@ -331,7 +331,7 @@ class Network(object): Notes ----- - ``net = jones_2009_model(params)`` is the reccomended path for creating a + ``net = jones_2009_model(params)`` is the recommended path for creating a network. Instantiating the network as ``net = Network(params)`` will produce a network with no cell-to-cell connections. As such, connectivity information contained in ``params`` will be ignored. @@ -348,7 +348,7 @@ def __init__(self, params, add_drives_from_params=False, # interrogate a built and simulated net. In addition, CellResponse is # attached to a Network during simulation---Network is the natural # place to keep this information. Order matters: cell gids first, then - # artifical drive cells + # artificial drive cells self.gid_ranges = OrderedDict() self._n_gids = 0 # utility: keep track of last GID @@ -517,7 +517,7 @@ def add_evoked_drive(self, name, *, mu, sigma, numspikes, location, this evoked drive across the network in a given trial with one spike, set n_drive_cells=1 and cell_specific=False. cell_specific : bool - Whether each artifical drive cell has 1-to-1 (True, default) or + Whether each artificial drive cell has 1-to-1 (True, default) or all-to-all (False) connection parameters. Note that 1-to-1 connectivity requires that n_drive_cells='n_cells', where 'n_cells' denotes the number of all available cells that this drive can @@ -551,7 +551,7 @@ def add_evoked_drive(self, name, *, mu, sigma, numspikes, location, Not fixed across trials (see Notes) conn_seed : int Optional initial seed for random number generator (default: 3). - Used to randomly remove connections when probablity < 1.0. + Used to randomly remove connections when probability < 1.0. Fixed across trials (see Notes) Notes @@ -624,7 +624,7 @@ def add_poisson_drive(self, name, *, tstart=0, tstop=None, rate_constant, to synchronize the timing of Poisson drive across the network in a given trial, set n_drive_cells=1 and cell_specific=False. cell_specific : bool - Whether each artifical drive cell has 1-to-1 (True, default) or + Whether each artificial drive cell has 1-to-1 (True, default) or all-to-all (False) connection parameters. Note that 1-to-1 connectivity requires that n_drive_cells='n_cells', where 'n_cells' denotes the number of all available cells that this drive can @@ -657,7 +657,7 @@ def add_poisson_drive(self, name, *, tstart=0, tstop=None, rate_constant, Used to generate event times for drive cells. conn_seed : int Optional initial seed for random number generator (default: 3). - Used to randomly remove connections when probablity < 1.0. + Used to randomly remove connections when probability < 1.0. """ _check_drive_parameter_values('Poisson', tstart=tstart, @@ -740,7 +740,7 @@ def add_bursty_drive(self, name, *, tstart=0, tstart_std=0, tstop=None, all-to-all connectivity and provide synchronous input to cells in the network. 
cell_specific : bool - Whether each artifical drive cell has 1-to-1 (True) or all-to-all + Whether each artificial drive cell has 1-to-1 (True) or all-to-all (False, default) connection parameters. Note that 1-to-1 connectivity requires that n_drive_cells='n_cells', where 'n_cells' denotes the number of all available cells that this drive can @@ -773,7 +773,7 @@ def add_bursty_drive(self, name, *, tstart=0, tstart_std=0, tstop=None, Used to generate event times for drive cells. conn_seed : int Optional initial seed for random number generator (default: 3). - Used to randomly remove connections when probablity < 1.0. + Used to randomly remove connections when probability < 1.0. """ if not self._legacy_mode: _check_drive_parameter_values('bursty', tstart=tstart, tstop=tstop, @@ -845,7 +845,7 @@ def _attach_drive(self, name, drive, weights_ampa, weights_nmda, location, drive across the network in a given trial with one spike, set n_drive_cells=1 and cell_specific=False. cell_specific : bool - Whether each artifical drive cell has 1-to-1 (True) or all-to-all + Whether each artificial drive cell has 1-to-1 (True) or all-to-all (False) connection parameters. Note that 1-to-1 connectivity requires that n_drive_cells='n_cells', where 'n_cells' denotes the number of all available cells that this drive can @@ -1005,7 +1005,7 @@ def _instantiate_drives(self, tstop, n_trials=1): NB this must be a separate method because dipole.py:simulate_dipole accepts an n_trials-argument, which overrides the N_trials-parameter - used at intialisation time. The good news is that only the event_times + used at initialisation time. The good news is that only the event_times need to be recalculated, all the GIDs etc remain the same. """ self._reset_drives() @@ -1114,7 +1114,7 @@ def add_connection(self, src_gids, target_gids, loc, receptor, equivalent to passing a list of gids for the relevant cell type. source - target connections are made in an all-to-all pattern. target_gids : str | int | range | list of int - Identifer for targets of source cells. Passing str arguments + Identifier for targets of source cells. Passing str arguments ('L2_pyramidal', 'L2_basket', 'L5_pyramidal', 'L5_basket') is equivalent to passing a list of gids for the relevant cell type. source - target connections are made in an all-to-all pattern. @@ -1141,7 +1141,7 @@ def add_connection(self, src_gids, target_gids, loc, receptor, Defaults to 1.0 producing an all-to-all pattern. conn_seed : int Optional initial seed for random number generator (default: None). - Used to randomly remove connections when probablity < 1.0. + Used to randomly remove connections when probability < 1.0. Notes ----- @@ -1434,7 +1434,7 @@ class _NetworkDrive(dict): Each artificial drive cell has seed = event_seed + gid conn_seed : int Optional initial seed for random number generator. - Used to randomly remove connections when probablity < 1.0. + Used to randomly remove connections when probability < 1.0. target_types : set or list of str Names of cell types targeted by this drive (must be subset of net.cell_types.keys()). 
diff --git a/hnn_core/network_builder.py b/hnn_core/network_builder.py index 0d7c33f46..354e4b6e9 100644 --- a/hnn_core/network_builder.py +++ b/hnn_core/network_builder.py @@ -282,7 +282,7 @@ def __init__(self, net, trial_idx=0): self._cells = list() # artificial cells must be appended to a list in order to preserve - # the NEURON hoc objects and the corresonding python references + # the NEURON hoc objects and the corresponding python references # initialized by _ArtificialCell() self._drive_cells = list() diff --git a/hnn_core/network_models.py b/hnn_core/network_models.py index ffffb16c1..a86b29902 100644 --- a/hnn_core/network_models.py +++ b/hnn_core/network_models.py @@ -268,7 +268,7 @@ def calcium_model(params=None, add_drives_from_params=False, Returns ------- net : Instance of Network object - Network object used to store the Jones 2009 model with an impoved + Network object used to store the Jones 2009 model with an improved calcium channel distribution. See Also diff --git a/hnn_core/optimization/optimize_evoked.py b/hnn_core/optimization/optimize_evoked.py index 672cff1c0..381a4c1f0 100644 --- a/hnn_core/optimization/optimize_evoked.py +++ b/hnn_core/optimization/optimize_evoked.py @@ -110,7 +110,7 @@ def _split_by_evinput(drive_names, drive_dynamics, drive_syn_weights, tstop, def _generate_weights(evinput_params, tstop, dt, decay_multiplier): - """Calculation of weight function for wRMSE calcuation + """Calculation of weight function for wRMSE calculation Returns ------- @@ -392,7 +392,7 @@ def _get_drive_params(net, drive_names): target_receptor = net.connectivity[conn_idx]['receptor'] weight = net.connectivity[conn_idx]['nc_dict']['A_weight'] # note that for each drive, the weights dict should be unnested - # accross target cell types and receptors for ease-of-use when + # across target cell types and receptors for ease-of-use when # these values get restructured into a list downstream # legacy_mode hack: don't include invalid connections that have @@ -475,7 +475,7 @@ def optimize_evoked(net, tstop, n_trials, target_dpl, initial_dpl, maxiter=50, dt : float The integration time step (ms) of h.CVode during simulation. which_drives: 'all' or list - Evoked drives to optimize. If 'all', will opimize all evoked drives. + Evoked drives to optimize. If 'all', will optimize all evoked drives. If a subset list of evoked drives, will optimize only the evoked drives in the list. return_rmse : bool Returns list of unweighted RMSEs between the simulated and experimental dipole @@ -595,7 +595,7 @@ def optimize_evoked(net, tstop, n_trials, target_dpl, initial_dpl, maxiter=50, weights=opt_params['weights']) net_opt = net.copy() - # drive_params_updated must be a list for compatability with the args + # drive_params_updated must be a list for compatibility with the args # in the optimization engine, scipy.optimize.fmin_cobyla _myoptrun = partial(_optrun, drive_params_static=drive_static_params, diff --git a/hnn_core/parallel_backends.py b/hnn_core/parallel_backends.py index b022e5965..3f7d73d5d 100644 --- a/hnn_core/parallel_backends.py +++ b/hnn_core/parallel_backends.py @@ -112,7 +112,7 @@ def run_subprocess(command, obj, timeout, proc_queue=None, *args, **kwargs): """ proc_data_bytes = b'' # each loop while waiting will involve two Queue.get() timeouts, each - # 0.01s. This caclulation will error on the side of a longer timeout + # 0.01s. 
This calculation will error on the side of a longer timeout # than is specified because more is done each loop that just Queue.get() timeout_cycles = timeout / 0.02 @@ -695,7 +695,7 @@ def simulate(self, net, tstop, dt, n_trials, postproc=False): # just use the joblib backend for a single core if self.n_procs == 1: - print("MPIBackend is set to use 1 core: tranferring the " + print("MPIBackend is set to use 1 core: transferring the " "simulation to JoblibBackend....") return JoblibBackend(n_jobs=1).simulate(net, tstop=tstop, dt=dt, diff --git a/hnn_core/params.py b/hnn_core/params.py index 0718177f6..614219207 100644 --- a/hnn_core/params.py +++ b/hnn_core/params.py @@ -78,7 +78,7 @@ def read_params(params_fname): Returns ------- params : an instance of Params - Params containing paramter values from file + Params containing parameter values from file """ split_fname = op.splitext(params_fname) diff --git a/hnn_core/tests/conftest.py b/hnn_core/tests/conftest.py index 643f8697e..f2e200965 100644 --- a/hnn_core/tests/conftest.py +++ b/hnn_core/tests/conftest.py @@ -22,13 +22,13 @@ def pytest_runtest_makereport(item, call): if "incremental" in item.keywords: # incremental marker is used - # The following condition was modifed from the example linked above. + # The following condition was modified from the example linked above. # We don't want to step out of the incremental testing block if # a previous test was marked "Skipped". For instance if MPI tests # are skipped because mpi4py is not installed, still continue with # all other tests that do not require mpi4py if call.excinfo is not None and not call.excinfo.typename == "Skipped": - # the test has failed, but was not skiped + # the test has failed, but was not skipped # retrieve the class name of the test cls_name = str(item.cls) diff --git a/hnn_core/tests/test_cell.py b/hnn_core/tests/test_cell.py index 769497c9b..e9a234cf1 100644 --- a/hnn_core/tests/test_cell.py +++ b/hnn_core/tests/test_cell.py @@ -96,7 +96,7 @@ def test_cell(): new_Ra = 4.0 cell.modify_section(sec_name, L=new_L, diam=new_diam, cm=new_cm, Ra=new_Ra) - # Make sure distance betweeen `Section.end_pts` matches `Section.L` + # Make sure distance between `Section.end_pts` matches `Section.L` new_pts = np.array(cell.sections[sec_name].end_pts) new_dist = np.linalg.norm(new_pts[0, :] - new_pts[1, :]) np.isclose(new_L, new_dist) diff --git a/hnn_core/tests/test_cells_default.py b/hnn_core/tests/test_cells_default.py index 79e01a31c..39cd335d1 100644 --- a/hnn_core/tests/test_cells_default.py +++ b/hnn_core/tests/test_cells_default.py @@ -20,7 +20,7 @@ def test_cells_default(): assert 'apical_2' in l5p.sections # check that after building, the vertical sections have the length - # specified in get_L5Pyr_params_default (or overriden in a params file). + # specified in get_L5Pyr_params_default (or overridden in a params file). 
# Note that the lengths implied by _secs_L5Pyr are completely ignored: # NEURON extends the sections as needed to match the sec.L 's vertical_secs = ['basal_1', 'soma', 'apical_trunk', 'apical_1', 'apical_2', diff --git a/hnn_core/tests/test_drives.py b/hnn_core/tests/test_drives.py index 34bfbb77d..5abb480e2 100644 --- a/hnn_core/tests/test_drives.py +++ b/hnn_core/tests/test_drives.py @@ -148,7 +148,7 @@ def test_add_drives(): assert drive_conn['nc_dict']['A_weight'] == weights_ampa[target_type] assert drive_conn['nc_dict']['A_delay'] == syn_delays[target_type] - # Test drive targetting specific section + # Test drive targeting specific section # Section present on all cells indicated location = 'apical_tuft' weights_ampa_tuft = {'L2_pyramidal': 1.0, 'L5_pyramidal': 2.0} @@ -170,7 +170,7 @@ def test_add_drives(): weights_ampa=weights_ampa_no_tuft, synaptic_delays=syn_delays_no_tuft, n_drive_cells=n_drive_cells) - # Test probabalistic drive connections. + # Test probabilistic drive connections. # drive with cell_specific=False n_drive_cells = 10 probability = 0.5 # test that only half of possible connections are made diff --git a/hnn_core/tests/test_extracellular.py b/hnn_core/tests/test_extracellular.py index 89297f041..708001776 100644 --- a/hnn_core/tests/test_extracellular.py +++ b/hnn_core/tests/test_extracellular.py @@ -91,7 +91,7 @@ def test_extracellular_api(): with pytest.raises(ValueError, match='Electrode array positions must ' 'contain more than 1 contact'): _, _ = _get_laminar_z_coords([(1, 2, 3)]) - with pytest.raises(ValueError, match='Make sure the electrode postions ' + with pytest.raises(ValueError, match='Make sure the electrode positions ' 'are equispaced, colinear'): _, _ = _get_laminar_z_coords([(1, 1, 3), (1, 1, 4), (1, 1, 3.5)]) diff --git a/hnn_core/tests/test_network.py b/hnn_core/tests/test_network.py index a1f9f5418..a14da5c37 100644 --- a/hnn_core/tests/test_network.py +++ b/hnn_core/tests/test_network.py @@ -364,7 +364,7 @@ def test_network_drives(): assert network_builder._drive_cells[n_bursty_sources].gid ==\ net._n_cells + n_bursty_sources - # check that Network drive connectivity tranfers to NetworkBuilder + # check that Network drive connectivity transfers to NetworkBuilder n_pyr = len(net.gid_ranges['L2_pyramidal']) n_basket = len(net.gid_ranges['L2_basket']) @@ -572,7 +572,7 @@ def test_network_connectivity(): n_trials=1) network_builder = NetworkBuilder(net) - # start by checking that Network connectivity tranfers to NetworkBuilder + # start by checking that Network connectivity transfers to NetworkBuilder n_pyr = len(net.gid_ranges['L2_pyramidal']) n_basket = len(net.gid_ranges['L2_basket']) @@ -763,7 +763,7 @@ def test_network_connectivity(): assert 0 not in net.connectivity[conn_idx]['src_gids'] # Check that pick_connection returns empty lists when searching for - # a drive targetting the wrong location + # a drive targeting the wrong location conn_idxs = pick_connection(net, src_gids='evdist1', loc='proximal') assert len(conn_idxs) == 0 assert not pick_connection(net, src_gids='evprox1', loc='distal') @@ -810,7 +810,7 @@ def test_network_connectivity(): pick_connection(**kwargs) # Test removing connections from net.connectivity - # Needs to be updated if number of drives change in preceeding tests + # Needs to be updated if number of drives change in preceding tests net.clear_connectivity() assert len(net.connectivity) == 4 # 2 drives x 2 target cell types net.clear_drives() diff --git a/hnn_core/tests/test_optimize_evoked.py 
b/hnn_core/tests/test_optimize_evoked.py index c19363209..7d0923e5f 100644 --- a/hnn_core/tests/test_optimize_evoked.py +++ b/hnn_core/tests/test_optimize_evoked.py @@ -171,7 +171,7 @@ def test_optimize_evoked(): # static drive params should remain constant assert drive_static_params_opt == drive_static_params_orig - # ensure that only the drive that we wanted to optimzie over changed + # ensure that only the drive that we wanted to optimize over changed drive_evdist1_dynamics_offset, drive_evdist1_syn_weights_offset, \ drive_static_params_offset = _get_drive_params(net_offset, ['evdist1']) drive_evdist1_dynamics_opt, drive_evdist1_syn_weights_opt, \ diff --git a/hnn_core/tests/test_parallel_backends.py b/hnn_core/tests/test_parallel_backends.py index f1e179fcb..754dc6a0c 100644 --- a/hnn_core/tests/test_parallel_backends.py +++ b/hnn_core/tests/test_parallel_backends.py @@ -246,7 +246,7 @@ def test_compare_hnn_core(self, run_hnn_core_fixture, backend, n_jobs=1): @requires_psutil def test_mpi_failure(run_hnn_core_fixture): """Test that an MPI failure is handled and messages are printed""" - # this MPI paramter will cause a MPI job to fail + # this MPI parameter will cause a MPI job to fail environ["OMPI_MCA_btl"] = "self" with pytest.warns(UserWarning) as record: diff --git a/hnn_core/viz.py b/hnn_core/viz.py index 7b896f9b8..f63a7514e 100644 --- a/hnn_core/viz.py +++ b/hnn_core/viz.py @@ -1013,7 +1013,7 @@ def _update_target_plot(ax, conn, src_gid, src_type_pos, target_type_pos, inplane_distance): from .cell import _get_gaussian_connection - # Extract indeces to get position in network + # Extract indices to get position in network # Index in gid range aligns with net.pos_dict target_src_pair = conn['gid_pairs'][src_gid] target_indeces = np.where(np.in1d(target_range, target_src_pair))[0] diff --git a/pyproject.toml b/pyproject.toml index 7fb0fbb50..25cd619b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,9 @@ [build-system] requires = ["setuptools>=40.8.0", "NEURON >=7.7; platform_system != 'Windows'"] -build-backend = "setuptools.build_meta:__legacy__" \ No newline at end of file +build-backend = "setuptools.build_meta:__legacy__" +[tool.codespell] +skip = '.git,*.pdf,*.svg' +check-hidden = true +# in jupyter notebooks - images and also some embedded outputs +ignore-regex = '^\s*"image/\S+": ".*|.*%22%3A%20.*' +ignore-words-list = 'tha,nam,sherif,dout' diff --git a/setup.py b/setup.py index c116695fb..ada4bfd3b 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ # # to make sure there are no residual mod files # -# also see following link to understand why build_py must be overriden: +# also see following link to understand why build_py must be overridden: # https://stackoverflow.com/questions/51243633/python-setuptools-setup-py-install-does-not-automatically-call-build class BuildMod(Command): user_options = []
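
For contributors who want to reproduce the CI spell check locally before pushing, something along the following lines should work from the repository root. This is a sketch and not part of the diff above; it assumes a codespell release recent enough to understand the --toml option and the [tool.codespell] table added in pyproject.toml (roughly 2.2.2 or newer), plus tomli on Python < 3.11:

    pip install codespell tomli   # tomli is only needed for TOML support on Python < 3.11
    codespell --toml pyproject.toml

As the comment in the new [tool.codespell] section indicates, the ignore-regex entry keeps codespell from flagging base64-encoded images and URL-encoded cell outputs embedded in the Jupyter notebooks, while ignore-words-list whitelists strings such as "nam" and "dout" that are valid in context but would otherwise be reported as misspellings.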