Skip to content

Commit

Permalink
fix testing paths
Browse files Browse the repository at this point in the history
  • Loading branch information
Islast committed May 2, 2018
1 parent 2b2ce51 commit d9ad8bc
Show file tree
Hide file tree
Showing 13 changed files with 100 additions and 39 deletions.
10 changes: 4 additions & 6 deletions BrainNetworksInPython/scripts/make_corr_matrices.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,10 @@
"""
Tools to create a correlation matrix from regional measures
"""
# A Global import to make code python 2 and 3 compatible
from __future__ import print_function

# Other essential package imports
# Essential package imports
import os
import numpy as np
import pandas as pd
import stats_functions

def get_non_numeric_cols(df):
numeric = np.fromiter((np.issubdtype(y, np.number) for y in df.dtypes),bool)
Expand All @@ -26,6 +22,8 @@ def create_residuals_df(df, names, covars_list):
you choose to correct for before correlating the regions.
df should be numeric for the columns in names and covars_list
'''
import BrainNetworksInPython.scripts.stats_functions as sf

# Raise TypeError if any of the relevant columns are nonnumeric
non_numeric_cols = get_non_numeric_cols(df[names+covars_list])
if non_numeric_cols:
Expand All @@ -46,7 +44,7 @@ def create_residuals_df(df, names, covars_list):

# Calculate the residuals
for name in names:
df_res.loc[:, name] = stats_functions.residuals(x.T, df.loc[:, name])
df_res.loc[:, name] = sf.residuals(x.T, df.loc[:, name])

# Return the residuals data frame
return df_res
Expand Down
69 changes: 69 additions & 0 deletions BrainNetworksInPython/scripts/stat_functions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
#!/usr/bin/env python

# Essential package imports
import numpy as np

def residuals(x, y):
    '''
    Return residuals of the least squares solution to y = AB where
    A = [[x 1]] (the explanatory rows of x plus an intercept column).
    Uses numpy.linalg.lstsq to find B.

    Parameters
    ----------
    x : numpy array
        Explanatory variable(s): 1-D for a single variable, or 2-D with
        one variable per row.
    y : numpy array
        Dependent variable, same length as the last axis of x.

    Returns
    -------
    numpy array
        y minus the fitted values, i.e. the part of y not explained by x.
    '''

    # Promote a single explanatory variable to a one-row 2-D array so the
    # stacking below handles the 1-D and 2-D cases uniformly.
    if len(x.shape) == 1:
        x = x[np.newaxis, :]
    A = np.vstack([x, np.ones(x.shape[-1])]).T
    # get the least squares solution to AB = y.
    # rcond=None selects numpy's current default conditioning cutoff and
    # avoids the FutureWarning raised when rcond is left unspecified.
    B = np.linalg.lstsq(A, y, rcond=None)[0]
    # calculate and return the residuals
    m, c = B[:-1], B[-1]
    pre = np.sum(m * x.T, axis=1) + c
    res = y - pre
    return res

def partial_r(x, y, covars):
    '''
    Partial correlation of x and y controlling for covars.

    Both x and y are first regressed on covars; the Pearson correlation
    coefficient of the two residual series is returned.
    '''
    from scipy.stats import pearsonr

    x_adjusted = residuals(covars, x)
    y_adjusted = residuals(covars, y)
    return pearsonr(x_adjusted, y_adjusted)[0]

def variance_partition(x1, x2, y):
    '''
    Describe the independent and shared explanatory
    variance of two (possibly correlated) variables on
    the dependent variable (y)

    Returns
    -------
    a, b, c, d : float
        a - variance explained independently by x1
        b - variance shared between x1 and x2
        c - variance explained independently by x2
        d - variance left unexplained by the model
    '''
    from statsmodels.formula.api import ols
    from scipy.stats import pearsonr
    import pandas as pd

    # Assemble the variables into a single data frame for the regression
    data = pd.DataFrame( { 'Y' : y ,
                           'X1' : x1,
                           'X2' : x2 } )

    # Overall r squared value of the multiple regression Y ~ X1 + X2
    total_rsq = ols('Y ~ X1 + X2', data=data).fit().rsquared

    # Residualise each predictor against the other, so each residual
    # series keeps only the variance unique to that predictor
    # (expressed in the units of that predictor)
    data['res_X1givenX2'] = residuals(data['X2'], data['X1'])
    data['res_X2givenX1'] = residuals(data['X1'], data['X2'])

    # The squared Pearson correlations of the residualised predictors
    # with the dependent variable give the variance each explains
    # independently (a and c); the shared fraction (b) is whatever
    # remains of the overall r squared, and d is the fraction of
    # variance the model does not explain.
    a = (pearsonr(data['res_X1givenX2'], data['Y'])[0])**2
    c = (pearsonr(data['res_X2givenX1'], data['Y'])[0])**2
    b = total_rsq - a - c
    d = 1.0 - total_rsq

    # Return these four fractions
    return a, b, c, d

9 changes: 4 additions & 5 deletions BrainNetworksInPython/scripts/stats_functions.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
#!/usr/bin/env python

# Essential package imports
import numpy as np

def residuals(x, y):
'''
Return residuals of least squares solution to y = AB where A =[[x 1]].
Uses numpy.linalg.lstsq to find B
'''
import numpy as np

if len(x.shape) == 1:
x = x[np.newaxis, :]
Expand All @@ -19,8 +21,6 @@ def residuals(x, y):
return res

def partial_r(x, y, covars):

import numpy as np
from scipy.stats import pearsonr

res_i = residuals(covars, x)
Expand All @@ -35,7 +35,6 @@ def variance_partition(x1, x2, y):
the dependent variable (y)
'''
from statsmodels.formula.api import ols
import numpy as np
from scipy.stats import pearsonr
import pandas as pd

Expand Down Expand Up @@ -67,4 +66,4 @@ def variance_partition(x1, x2, y):

# Return these four fractions
return a, b, c, d


Binary file removed BrainNetworksInPython/tests/.fixture_hash(3, 4)
Binary file not shown.
Binary file removed BrainNetworksInPython/tests/.fixture_hash(3, 5)
Binary file not shown.
12 changes: 0 additions & 12 deletions BrainNetworksInPython/tests/make_graphs_test.py

This file was deleted.

File renamed without changes.
Original file line number Diff line number Diff line change
@@ -1,11 +1,7 @@
import os
import sys
sys.path.append(os.path.abspath(os.path.join('wrappers')))
sys.path.append(os.path.abspath(os.path.join('example_data')))
sys.path.append(os.path.abspath(os.path.join('scripts')))

import BrainNetworksInPython.make_corr_matrices as mcm
import BrainNetworksInPython.stats_functions as sf
from BrainNetworksInPython.scripts import stats_functions as sf
from BrainNetworksInPython.scripts import make_corr_matrices as mcm
import pytest
import pandas as pd
import numpy as np
Expand Down
9 changes: 9 additions & 0 deletions tests/make_graphs_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
import os
import sys

import pytest
import pandas as pd
import numpy as np

import BrainNetworksInPython.scripts.make_graphs as mkg

Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import unittest
import os
import sys

class FixturesTest(unittest.TestCase):

Expand All @@ -8,7 +9,7 @@ class FixturesTest(unittest.TestCase):
def setUpClass(cls):
print('\nin set up - this takes about 80 secs')

from BrainNetworksInPython.tests.write_fixtures import generate_fixture_hashes, unpickle_hash
from tests.write_fixtures import generate_fixture_hashes, unpickle_hash
cls.hash_dict_new = generate_fixture_hashes()
cls.hash_dict_original = unpickle_hash()
# define dictionary keys for individual files for checking
Expand All @@ -17,7 +18,6 @@ def setUpClass(cls):
cls.gm = folder + '/network-analysis/GlobalMeasures_corrmat_file_COST010.csv'
cls.lm = folder + '/network-analysis/NodalMeasures_corrmat_file_COST010.csv'
cls.rich = folder + '/network-analysis/RICH_CLUB_corrmat_file_COST010.csv'
import

#--------------------------- Tests --------------------------------
# Each of these tests checks that our newly generated version of
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,10 @@
#--------------------------- Write fixtures ---------------------------
# To regression test our wrappers we need examples. This script
# generates files. We save these files once, and regression-tests.py
# generates files. We save these files once, and regression_test.py
# re-generates these files to test them for identicality with the
# presaved examples (fixtures). If they are found not to be identical
# it throws up an error.
#
# (*someday* I would like to replace saving files with saving hashes)
#
# The point of this is to check that throughout the changes we make to
# BrainNetworksInPython the functionality of this script stays the same
#
Expand All @@ -23,7 +21,7 @@ def recreate_correlation_matrix_fixture(folder):
##### the Whitaker_Vertes dataset #####
import BrainNetworksInPython.datasets.NSPN_WhitakerVertes_PNAS2016.data as data
centroids, regionalmeasures, names, covars, names_308_style = data._get_data()
from BrainNetworksInPython.corrmat_from_regionalmeasures import corrmat_from_regionalmeasures
from BrainNetworksInPython.wrappers.corrmat_from_regionalmeasures import corrmat_from_regionalmeasures
corrmat_path = os.getcwd()+folder+'/corrmat_file.txt'
corrmat_from_regionalmeasures(
regionalmeasures,
Expand All @@ -42,7 +40,7 @@ def recreate_network_analysis_fixture(folder, corrmat_path):
# calculate global measures
import random
random.seed(2984)
from BrainNetworksInPython.network_analysis_from_corrmat import network_analysis_from_corrmat
from BrainNetworksInPython.wrappers.network_analysis_from_corrmat import network_analysis_from_corrmat
network_analysis_from_corrmat(corrmat_path,
names,
centroids,
Expand Down
10 changes: 7 additions & 3 deletions tutorials/jupyter_demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,9 @@
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import sys\n",
Expand Down Expand Up @@ -205,7 +207,9 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# We will be using the files we just used network_analysis_from _corrmat to create\n",
Expand Down Expand Up @@ -238,7 +242,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.4"
"version": "3.6.3"
}
},
"nbformat": 4,
Expand Down

0 comments on commit d9ad8bc

Please sign in to comment.