Merge pull request #142 from geometric-intelligence/ninamiolane-lint2
Pass ruff linter on whole codebase
ninamiolane authored Apr 8, 2024
2 parents f367123 + ab55f2f commit 1a29509
Showing 24 changed files with 239 additions and 11,515 deletions.
7 changes: 1 addition & 6 deletions .pre-commit-config.yaml
@@ -25,9 +25,4 @@ repos:
types_or: [ python, pyi, jupyter ]
args: [ --fix ]
- id: ruff-format
types_or: [ python, pyi, jupyter ]

- repo: https://github.com/numpy/numpydoc
rev: v1.6.0
hooks:
- id: numpydoc-validation
types_or: [ python, pyi, jupyter ]
7 changes: 4 additions & 3 deletions neurometry/curvature/datasets/gridcells.py
@@ -96,7 +96,7 @@ def generate_all_grids(
grids : numpy.ndarray, shape=(num_cells, num_fields_per_cell = (ceil(dims[0]/lx)+1)*(ceil(dims[1]/ly)+1),2)
All the grid cell lattices.
"""
lx = ly = 10 # TODO: FIX, these values are only placeholders.
lx = ly = 10 # TODO: FIX, these values are only placeholders.
# ref_lattice = create_reference_lattice(lx, ly, arena_dims, lattice_type = lattice_type)
ref_lattice = structures.get_lattice(
scale=grid_scale, lattice_type=lattice_type, dimensions=arena_dims
@@ -107,15 +107,16 @@
grids_warped = np.zeros((n_cells, *np.shape(ref_lattice)))

arena_dims = np.array(arena_dims)
rng = np.random.default_rng(seed=0)

for i in range(n_cells):
angle_i = np.random.normal(grid_orientation_mean, grid_orientation_std) * (
angle_i = rng.normal(grid_orientation_mean, grid_orientation_std) * (
np.pi / 180
)
rot_i = np.array(
[[np.cos(angle_i), -np.sin(angle_i)], [np.sin(angle_i), np.cos(angle_i)]]
)
phase_i = np.multiply([lx, ly], np.random.rand(2))
phase_i = np.multiply([lx, ly], rng.random(2))
lattice_i = np.matmul(rot_i, ref_lattice.T).T + phase_i
# lattice_i = np.where(abs(lattice_i) < arena_dims / 2, lattice_i, None)

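The change above is the pattern this PR applies across the datasets and plotting modules: legacy global-state calls (np.random.seed, np.random.normal, np.random.rand, ...) are replaced with a locally constructed numpy.random.Generator. Below is a minimal sketch of the mapping, assuming NumPy >= 1.17; the seed of 0 mirrors the diff, the variable names and numeric arguments are purely illustrative, and note that a seeded Generator does not reproduce the streams the old global-state API produced.

import numpy as np

rng = np.random.default_rng(seed=0)        # replaces np.random.seed(0)

angle = rng.normal(0.0, 3.0)               # was np.random.normal(0.0, 3.0)
phase = rng.random(2)                      # was np.random.rand(2)
spikes = rng.poisson(4.0)                  # was np.random.poisson(4.0)
theta = rng.uniform(0, 2 * np.pi, 1000)    # was np.random.uniform(0, 2 * np.pi, 1000)
idxs = rng.integers(0, 4095, 5)            # was np.random.randint(0, 4095, 5)
speed = rng.rayleigh(0.13, 100)            # was np.random.rayleigh(0.13, 100)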
54 changes: 28 additions & 26 deletions neurometry/curvature/datasets/synthetic.py
@@ -70,13 +70,14 @@ def load_images(n_scalars=10, n_angles=1000, img_size=256):
images = []
angles = []
scalars = []
rng = np.random.default_rng(seed=0)
for i_angle in range(n_angles):
angle = 360 * i_angle / n_angles
rot_image = skimage.transform.rotate(image, angle)
for i_scalar in range(n_scalars):
scalar = 1 + 0.2 * i_scalar
blur_image = skimage.filters.gaussian(rot_image, sigma=scalar)
noise = np.random.normal(loc=0.0, scale=0.05, size=blur_image.shape)
noise = rng.normal(loc=0.0, scale=0.05, size=blur_image.shape)
images.append((blur_image + noise).astype(np.float32))
angles.append(angle)
scalars.append(scalar)
@@ -240,40 +241,41 @@ def load_place_cells(n_times=10000, n_cells=40):
n_firing_per_cell = int(n_times / n_cells)
place_cells = []
labels = []
rng = np.random.default_rng(seed=0)
for _ in range(n_firing_per_cell):
for i_cell in range(n_cells):
cell_firings = np.zeros(n_cells)

if i_cell == 0:
cell_firings[-2] = np.random.poisson(1.0)
cell_firings[-1] = np.random.poisson(2.0)
cell_firings[0] = np.random.poisson(4.0)
cell_firings[1] = np.random.poisson(2.0)
cell_firings[2] = np.random.poisson(1.0)
cell_firings[-2] = rng.poisson(1.0)
cell_firings[-1] = rng.poisson(2.0)
cell_firings[0] = rng.poisson(4.0)
cell_firings[1] = rng.poisson(2.0)
cell_firings[2] = rng.poisson(1.0)
elif i_cell == 1:
cell_firings[-1] = np.random.poisson(1.0)
cell_firings[0] = np.random.poisson(2.0)
cell_firings[1] = np.random.poisson(4.0)
cell_firings[2] = np.random.poisson(2.0)
cell_firings[3] = np.random.poisson(1.0)
cell_firings[-1] = rng.poisson(1.0)
cell_firings[0] = rng.poisson(2.0)
cell_firings[1] = rng.poisson(4.0)
cell_firings[2] = rng.poisson(2.0)
cell_firings[3] = rng.poisson(1.0)
elif i_cell == n_cells - 2:
cell_firings[-4] = np.random.poisson(1.0)
cell_firings[-3] = np.random.poisson(2.0)
cell_firings[-2] = np.random.poisson(4.0)
cell_firings[-1] = np.random.poisson(2.0)
cell_firings[0] = np.random.poisson(1.0)
cell_firings[-4] = rng.poisson(1.0)
cell_firings[-3] = rng.poisson(2.0)
cell_firings[-2] = rng.poisson(4.0)
cell_firings[-1] = rng.poisson(2.0)
cell_firings[0] = rng.poisson(1.0)
elif i_cell == n_cells - 1:
cell_firings[-3] = np.random.poisson(1.0)
cell_firings[-2] = np.random.poisson(2.0)
cell_firings[-1] = np.random.poisson(4.0)
cell_firings[0] = np.random.poisson(2.0)
cell_firings[1] = np.random.poisson(1.0)
cell_firings[-3] = rng.poisson(1.0)
cell_firings[-2] = rng.poisson(2.0)
cell_firings[-1] = rng.poisson(4.0)
cell_firings[0] = rng.poisson(2.0)
cell_firings[1] = rng.poisson(1.0)
else:
cell_firings[i_cell - 2] = np.random.poisson(1.0)
cell_firings[i_cell - 1] = np.random.poisson(2.0)
cell_firings[i_cell] = np.random.poisson(4.0)
cell_firings[i_cell + 1] = np.random.poisson(2.0)
cell_firings[i_cell - 3] = np.random.poisson(1.0)
cell_firings[i_cell - 2] = rng.poisson(1.0)
cell_firings[i_cell - 1] = rng.poisson(2.0)
cell_firings[i_cell] = rng.poisson(4.0)
cell_firings[i_cell + 1] = rng.poisson(2.0)
cell_firings[i_cell - 3] = rng.poisson(1.0)
place_cells.append(cell_firings)
labels.append(i_cell / n_cells * 360)

3 changes: 2 additions & 1 deletion neurometry/curvature/datasets/utils.py
@@ -141,9 +141,10 @@ def load(config):
indices = np.arange(len(dataset))

train_indices = np.arange(train_num)
rng = np.random.default_rng(seed=0)
if config.batch_shuffle:
# Note: this breaks the temporal ordering.
train_indices = np.random.choice(indices, train_num, replace=False)
train_indices = rng.choice(indices, train_num, replace=False)

test_indices = np.delete(indices, train_indices)
train_dataset = dataset[train_indices]
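For context on the utils.py change: rng.choice(..., replace=False) returns a shuffled subset without duplicates, and because indices is an arange, np.delete(indices, train_indices) removes exactly those values, so the train and test index sets stay disjoint. A small self-contained check of that reasoning (the sizes here are made up for illustration, not taken from the config):

import numpy as np

rng = np.random.default_rng(seed=0)
indices = np.arange(10)
train_indices = rng.choice(indices, 7, replace=False)  # shuffled, no repeats
test_indices = np.delete(indices, train_indices)       # positions == values for an arange
assert set(train_indices) | set(test_indices) == set(indices)
assert set(train_indices) & set(test_indices) == set()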
7 changes: 4 additions & 3 deletions neurometry/curvature/plots.py
@@ -4,13 +4,14 @@

# Generate some sample data

theta = np.random.uniform(0, 2 * np.pi, 1000)
rng = np.random.default_rng(seed=0)
theta = rng.uniform(0, 2 * np.pi, 1000)

r = np.random.normal(1, 0.1, 1000)
r = rng.normal(1, 0.1, 1000)

x = r * np.cos(theta)
y = r * np.sin(theta)
z = np.random.normal(0, 0.1, 1000)
z = rng.normal(0, 0.1, 1000)


# Create a 3D scatter plot
22 changes: 13 additions & 9 deletions neurometry/datasets/load_rnn_grid_cells.py
@@ -1,14 +1,16 @@
import os
import random
import sys
from pathlib import Path

import matplotlib.pyplot as plt
import numpy as np

sys.path.append(str(Path(__file__).parent.parent))

from .rnn_grid_cells import config, dual_agent_activity, single_agent_activity, utils
# sys.path.append(str(Path(__file__).parent.parent))
from .rnn_grid_cells import (
config,
dual_agent_activity,
single_agent_activity,
utils,
)

# Loading single agent model

@@ -62,9 +64,10 @@ def load_activations(epochs, version="single", verbose=True):
options, _ = parser.parse_known_args()
options.run_ID = utils.generate_run_ID(options)
if type == "single":
activations_single_agent, rate_map_single_agent = (
single_agent_activity.main(options, epoch=epoch)
)
(
activations_single_agent,
rate_map_single_agent,
) = single_agent_activity.main(options, epoch=epoch)
activations.append(activations_single_agent)
rate_maps.append(rate_map_single_agent)
elif type == "dual":
@@ -92,8 +95,9 @@ def load_activations(epochs, version="single", verbose=True):


def plot_rate_map(indices, num_plots, activations):
rng = np.random.default_rng(seed=0)
if indices is None:
idxs = np.random.randint(0, 4095, num_plots)
idxs = rng.integers(0, 4095, num_plots)
else:
idxs = indices
num_plots = len(indices)
6 changes: 3 additions & 3 deletions neurometry/datasets/rnn_grid_cells/place_cells.py
@@ -16,9 +16,9 @@ def __init__(self, options, us=None):
self.softmax = torch.nn.Softmax(dim=-1)

# Randomly tile place cell centers across environment
np.random.seed(0)
usx = np.random.uniform(-self.box_width / 2, self.box_width / 2, (self.Np,))
usy = np.random.uniform(-self.box_width / 2, self.box_width / 2, (self.Np,))
rng = np.random.default_rng(seed=0)
usx = rng.uniform(-self.box_width / 2, self.box_width / 2, (self.Np,))
usy = rng.uniform(-self.box_width / 2, self.box_width / 2, (self.Np,))
self.us = torch.tensor(np.vstack([usx, usy]).T)
# If using a GPU, put on GPU
self.us = self.us.to(self.device)
@@ -17,9 +17,9 @@ def __init__(self, options, us=None):
self.softmax = torch.nn.Softmax(dim=-1)

# Randomly tile place cell centers across environment
np.random.seed(0)
usx = np.random.uniform(-self.box_width / 2, self.box_width / 2, (self.Np,))
usy = np.random.uniform(-self.box_width / 2, self.box_width / 2, (self.Np,))
rng = np.random.default_rng(seed=0)
usx = rng.uniform(-self.box_width / 2, self.box_width / 2, (self.Np,))
usy = rng.uniform(-self.box_width / 2, self.box_width / 2, (self.Np,))
self.us = torch.tensor(np.vstack([usx, usy]).T)
# If using a GPU, put on GPU
self.us = self.us.to(self.device)
19 changes: 7 additions & 12 deletions neurometry/datasets/rnn_grid_cells/scores.py
@@ -189,26 +189,21 @@ def get_scores(self, rate_map):
max_60_ind,
)

def plot_ratemap(self, ratemap, ax=None, title=None, *args, **kwargs): # pylint: disable=keyword-arg-before-vararg
def plot_ratemap(self, ratemap, ax=None, title=None, *args, **kwargs):
"""Plot ratemaps."""
if ax is None:
ax = plt.gca()
# Plot the ratemap
ax.imshow(ratemap, interpolation="none", *args, **kwargs)
# ax.pcolormesh(ratemap, *args, **kwargs)
ax.imshow(ratemap, *args, interpolation="none", **kwargs)
ax.axis("off")
if title is not None:
ax.set_title(title)

def plot_sac(self, sac, mask_params=None, ax=None, title=None, *args, **kwargs): # pylint: disable=keyword-arg-before-vararg
def plot_sac(self, sac, mask_params=None, ax=None, title=None, *args, **kwargs):
"""Plot spatial autocorrelogram."""
if ax is None:
ax = plt.gca()
# Plot the sac
useful_sac = sac * self._plotting_sac_mask
ax.imshow(useful_sac, interpolation="none", *args, **kwargs)
# ax.pcolormesh(useful_sac, *args, **kwargs)
# Plot a ring for the adequate mask
ax.imshow(useful_sac, *args, interpolation="none", **kwargs)
if mask_params is not None:
center = self._nbins - 1
ax.add_artist(
@@ -304,11 +299,11 @@ def get_sac_interp(self, cell):
yy = np.linspace(-1, 1, 99)
return scipy.interpolate.RegularGridInterpolator((xx, yy), sac)

def get_phi(
self, cell, interp=None, spacing_values=np.arange(0.01, 1.0, 0.01)
): # 0.15
def get_phi(self, cell, interp=None, spacing_values=None): # 0.15
"""Get orientation of grid cell."""

if spacing_values is None:
spacing_values = np.arange(0.01, 1.0, 0.01)
if interp is None:
interp = self.get_sac_interp(cell)

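The get_phi change above is the usual fix when a linter flags a function call in a default argument: np.arange(0.01, 1.0, 0.01) used to be evaluated once at definition time, so it moves behind a None sentinel and is built inside the body. A hedged sketch of the sentinel pattern (get_phi_sketch is an illustrative stand-in, not the real scores.py method):

import numpy as np

def get_phi_sketch(cell, interp=None, spacing_values=None):
    # Build the default at call time instead of in the signature.
    if spacing_values is None:
        spacing_values = np.arange(0.01, 1.0, 0.01)
    return cell, interp, spacing_values

The same file's imshow calls are also reordered so positional *args come before any keyword argument, e.g. ax.imshow(sac, *args, interpolation="none", **kwargs).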
15 changes: 7 additions & 8 deletions neurometry/datasets/rnn_grid_cells/trajectory_generator.py
@@ -44,19 +44,18 @@ def generate_trajectory(self, box_width, box_height, batch_size):
self.border_region = 0.03 # meters

# Initialize variables
rng = np.random.default_rng(seed=0)
position = np.zeros([batch_size, samples + 2, 2])
head_dir = np.zeros([batch_size, samples + 2])
position[:, 0, 0] = np.random.uniform(-box_width / 2, box_width / 2, batch_size)
position[:, 0, 1] = np.random.uniform(
-box_height / 2, box_height / 2, batch_size
)
head_dir[:, 0] = np.random.uniform(0, 2 * np.pi, batch_size)
position[:, 0, 0] = rng.uniform(-box_width / 2, box_width / 2, batch_size)
position[:, 0, 1] = rng.uniform(-box_height / 2, box_height / 2, batch_size)
head_dir[:, 0] = rng.uniform(0, 2 * np.pi, batch_size)
velocity = np.zeros([batch_size, samples + 2])

# Generate sequence of random boosts and turns
random_turn = np.random.normal(mu, sigma, [batch_size, samples + 1])
random_vel = np.random.rayleigh(b, [batch_size, samples + 1])
v = np.abs(np.random.normal(0, b * np.pi / 2, batch_size))
random_turn = rng.normal(mu, sigma, [batch_size, samples + 1])
random_vel = rng.rayleigh(b, [batch_size, samples + 1])
v = np.abs(rng.normal(0, b * np.pi / 2, batch_size))

for t in range(samples + 1):
# Update velocity
@@ -44,19 +44,18 @@ def generate_trajectory(self, box_width, box_height, batch_size):
self.border_region = 0.03 # meters

# Initialize variables
rng = np.random.default_rng(seed=0)
position = np.zeros([batch_size, samples + 2, 2])
head_dir = np.zeros([batch_size, samples + 2])
position[:, 0, 0] = np.random.uniform(-box_width / 2, box_width / 2, batch_size)
position[:, 0, 1] = np.random.uniform(
-box_height / 2, box_height / 2, batch_size
)
head_dir[:, 0] = np.random.uniform(0, 2 * np.pi, batch_size)
position[:, 0, 0] = rng.uniform(-box_width / 2, box_width / 2, batch_size)
position[:, 0, 1] = rng.uniform(-box_height / 2, box_height / 2, batch_size)
head_dir[:, 0] = rng.uniform(0, 2 * np.pi, batch_size)
velocity = np.zeros([batch_size, samples + 2])

# Generate sequence of random boosts and turns
random_turn = np.random.normal(mu, sigma, [batch_size, samples + 1])
random_vel = np.random.rayleigh(b, [batch_size, samples + 1])
v = np.abs(np.random.normal(0, b * np.pi / 2, batch_size))
random_turn = rng.normal(mu, sigma, [batch_size, samples + 1])
random_vel = rng.rayleigh(b, [batch_size, samples + 1])
v = np.abs(rng.normal(0, b * np.pi / 2, batch_size))

for t in range(samples + 1):
# Update velocity
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -83,7 +83,7 @@ repository="https://github.com/geometric-intelligence/neurometry"

[tool.ruff]
target-version = "py311"
extend-include = ["*.ipynb"]
extend-exclude = ["*.ipynb"]

[tool.ruff.format]
docstring-code-format = true
@@ -107,6 +107,7 @@ select = [
ignore = [
"E501", # line too long
"PERF203", # allow try-except within loops
"RUF012", # force typing
]

[tool.ruff.lint.pydocstyle]
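For reference, the configuration above is what the ruff CLI reads; with extend-exclude = ["*.ipynb"], notebooks are now skipped rather than linted. A minimal sketch of invoking the same two steps the pre-commit hooks run, assuming ruff is installed on PATH (this snippet is not part of the diff):

import subprocess

# Lint with autofix, then apply the formatter, using the settings in pyproject.toml.
subprocess.run(["ruff", "check", "--fix", "."], check=True)
subprocess.run(["ruff", "format", "."], check=True)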
2 changes: 1 addition & 1 deletion tutorials/01_methods_create_synthetic_data.ipynb
@@ -4,7 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"# Synthetic Neural Manifolds"
"# Create Synthetic Neural Manifolds"
]
},
{
2 changes: 1 addition & 1 deletion tutorials/02_methods_estimate_manifold_dimension.ipynb
@@ -4,7 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"# Neural Dimensionality Estimation"
"# Estimate Neural Dimensions"
]
},
{
10 changes: 9 additions & 1 deletion tutorials/03_methods_estimate_manifold_topology.ipynb
@@ -7,7 +7,15 @@
"tags": []
},
"source": [
"## Set Up + Imports"
"# Estimate Neural Topology"
]
},
{
"cell_type": "markdown",
"id": "1ff5d6ed",
"metadata": {},
"source": [
"### Set Up + Imports"
]
},
{
@@ -1,11 +1,19 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "2720eebc",
"metadata": {},
"source": [
"# Explore Hyperbolic Geometry"
]
},
{
"cell_type": "markdown",
"id": "87c6f038-2b67-41ef-86a2-84d110626a3c",
"metadata": {},
"source": [
"## Imports & Setup"
"### Imports & Setup"
]
},
{
